# Record the exact R version, platform, locale, and package versions used to
# produce this report, for reproducibility of the results below.
sessionInfo()
## R version 3.5.2 (2018-12-20)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17763)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.2  magrittr_1.5    tools_3.5.2     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.21      stringr_1.3.1   xfun_0.4        digest_0.6.18  
## [13] evaluate_0.12

User Inputs

# Name of the response column to model (supplied via the Rmd `params` list).
output.var <- params$output.var

# Transformation selector, driven by params$trans:
#   1 = absolute-value transform, 2 = log transform,
#   3 = normalized transform, anything else = no transformation.
# Exactly one flag ends up TRUE (or none, for "no transformation").
transform.abs <- FALSE
log.pred <- FALSE
norm.pred <- FALSE
if (params$trans == 1) {
  # BUG FIX: the original used `transform.abs == TRUE` — a comparison whose
  # result was silently discarded — so this flag was never actually set.
  transform.abs <- TRUE
} else if (params$trans == 2) {
  log.pred <- TRUE
} else if (params$trans == 3) {
  norm.pred <- TRUE
} else {
  message("You have chosen no transformation")
}

# Copy the analysis/algorithm toggles out of the Rmd `params` list into
# top-level variables of the same name, so later chunks can test them directly.
param.flags <- c(
  "eda",
  "algo.forward", "algo.backward", "algo.stepwise",
  "algo.LASSO", "algo.LARS",
  "algo.forward.caret", "algo.backward.caret", "algo.stepwise.caret",
  "algo.LASSO.caret", "algo.LARS.caret"
)
for (flag in param.flags) {
  assign(flag, params[[flag]])
}

# Echo the full parameter list into the report so every run documents the
# exact configuration it was produced with.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 13
##  $ output.var         : chr "y3"
##  $ trans              : int 2
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
# label.names          = name of the column the models train/predict on
# alt.scale.label.name = Alternate Scale variable name
#   - if predicting on log, then alt.scale is normal scale
#   - if predicting on normal scale, then alt.scale is log scale
# The three prediction-scale flags are mutually exclusive (set from
# params$trans above), so a single if/else chain replaces the original
# three independent `if` blocks. paste(..., sep = "") → paste0().
if (log.pred) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
} else if (norm.pred) {
  label.names <- paste0("norm.", output.var)
  alt.scale.label.name <- output.var
} else {
  # No transformation (or absolute-value only): predict on the normal scale.
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}

Prepare Data

Read and Clean Features

# Load the feature matrix in both its standard- and high-precision forms.
# Paths are relative to the Rmd's location — TODO confirm the Data/ layout
# when running outside the original project tree.
features = read.csv("../../Data/features.csv")
features.highprec = read.csv("../../Data/features_highprec.csv")
# Quantify how much the two files differ: all.equal() reports the mean
# relative difference per column (only columns exceeding the default
# tolerance are listed). The differences below are all tiny (<0.4%),
# i.e. the files agree apart from rounding precision.
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Inspect the first rows of the standard-precision feature set: a JobName id
# column, predictors x1-x23, and stat1-stat217.
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Inspect the first rows of the high-precision feature set. Same layout as
# `features`; note e.g. column x11 carries full precision (1.050025e-07)
# where the standard file is rounded (1.05e-07).
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set for the remainder of the analysis
features <- features.highprec
#str(features) 

Checking correlations to evaluate removal of redundant features

# Pairwise correlations among the numeric feature columns, rounded to 2 dp
numeric.cols <- sapply(features, is.numeric)
corr.matrix <- round(cor(features[numeric.cols]), 2)

# Keep only the variables involved in at least one strong correlation
threshold <- 0.6
corr.matrix.tmp <- corr.matrix
diag(corr.matrix.tmp) <- 0  # zero the trivial self-correlations before filtering
high.corr <- apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix <- corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Predictor names: every feature column except the JobName key
feature.names <- colnames(features)
drops <- c('JobName')
feature.names <- feature.names[!(feature.names %in% drops)]
#str(feature.names)

Read and Clean Labels

# Labels live in a separate CSV keyed by JobName
labels <- read.csv("../../Data/labels.csv")
#str(labels)
# Drop incomplete rows, then keep only the key and the selected output variable
labels <- labels[complete.cases(labels), ]
labels <- labels[, c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.29  
##  Job_00003:   1   Median :124.03  
##  Job_00004:   1   Mean   :125.40  
##  Job_00007:   1   3rd Qu.:131.06  
##  Job_00008:   1   Max.   :193.73  
##  (Other)  :6974

Merge Datasets

# Join features and labels on JobName, then drop the key column
data <- merge(features, labels, by = 'JobName')
drops <- c('JobName')
data <- data[, (!colnames(data) %in% drops)]
#str(data)

Transformations

# Apply the label transformation selected via params$trans in the setup chunk.
# NOTE(review): the setup chunk contains `transform.abs == TRUE` (a comparison,
# not an assignment), so transform.abs stays FALSE and this first branch is
# effectively dead -- confirm intent upstream.
# (label.names / alt.scale.label.name are assumed defined earlier -- TODO confirm)
if (transform.abs == TRUE){
  # dB-style value -> absolute magnitude
  data[,label.names] = 10^(data[,label.names]/20)
  #data = filter(data, y3 < 1E7)
}
if (log.pred == TRUE){
  # Replace the raw-scale label with its base-10 log, then drop the raw column
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}

t = NULL # initialize to NULL for other cases
if (norm.pred == TRUE){
  # Fit bestNormalize and keep `t`: predictions are back-transformed later
  # with predict(t, ..., inverse = TRUE)
  t = bestNormalize::bestNormalize(data[[alt.scale.label.name]])
  data[label.names] = predict(t)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
#str(data)

Remove NA Cases

# Drop any rows left with missing values after merging/transforming
data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

if (eda == TRUE){
  # Correlation of each feature column with the label column(s), 4 dp
  corr.to.label =round(cor(dplyr::select(data,-one_of(label.names)),dplyr::select_at(data,label.names)),4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

if (eda == TRUE){
  # Variance inflation factors, largest first; large VIF flags multicollinearity
  # NOTE(review): bare select_at()/arrange() assume dplyr is attached, unlike
  # the dplyr::-qualified calls used elsewhere -- confirm
  vifDF = usdm::vif(select_at(data,feature.names)) %>% arrange(desc(VIF))
  head(vifDF,10)
}

Scatterplots

# Panel function for pairs(): draws a histogram of `x` in the current panel,
# with bar heights rescaled so the tallest bar reaches 1 (plotted in a
# temporarily widened user coordinate system, restored on exit).
panel.hist <- function(x, ...) {
    usr <- par("usr")
    on.exit(par(usr))
    par(usr = c(usr[1:2], 0, 1.5))
    hist.info <- hist(x, plot = FALSE)
    bks <- hist.info$breaks
    n.bks <- length(bks)
    bar.heights <- hist.info$counts / max(hist.info$counts)
    rect(bks[-n.bks], 0, bks[-1], bar.heights, col = "cyan", ...)
}
if (eda == TRUE){
  # Distribution of the label column(s); histogram() is presumably
  # lattice::histogram -- confirm which plotting package is attached
  histogram(data[ ,label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Plot `yvar` against each variable in `xvars`, one scatter plot per x.
#
# data  : data.frame containing all variables
# xvars : character vector of x column names; defaults to every column of
#         `data` except `yvar`
# yvar  : name of the y column
#
# Fixes vs. original: iterating over the names directly (instead of
# 1:length(xvars)) is safe when `xvars` is empty -- 1:length() would have
# produced c(1, 0) and plotted garbage; the redundant `df` copy and the
# unnecessary which() are removed.
ind.pairs.plot <- function(data, xvars = NULL, yvar)
{
    if (is.null(xvars)) {
        xvars <- names(data)[names(data) != yvar]
    }

    for (xv in xvars) {
        plot(data[, xv], data[, yvar], xlab = xv, ylab = yvar)
    }
}

if (eda == TRUE){
  # Scatter plot of the label against every individual feature
  ind.pairs.plot(data, feature.names, label.names)
}

# 
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# NOTE(review): this block runs only when eda is FALSE -- presumably so the
# x18 transformation happens on the modeling pass rather than during
# exploratory analysis; confirm the condition is not accidentally inverted.
if(eda ==FALSE){
  # x18 may need transformations
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  # NOTE(review): title still says "Original" although this plots sqrt(x18)
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18: replace the raw column with its square root
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# Shuffle the rows so the split is not order-dependent
data <- data[sample(nrow(data)), ]
# 80/20 split on the label (sample.split -- presumably caTools; confirm)
split <- sample.split(data[, label.names], SplitRatio = 0.8)

data.train <- subset(data, split == TRUE)
data.test <- subset(data, split == FALSE)

Common Functions

# Standard regression diagnostics for an lm-style fit: the base plot(model)
# panels, studentized and standardized residual plots, a histogram of
# studentized residuals with a standard-normal overlay, leverage and Cook's
# distance plots. Prints counts of influential points and returns the
# vector of Cook's distances.
#
# model : fitted model supporting resid/rstandard/rstudent/predict/model.matrix
# train : data the model was fit on (used for predictions and the 4/n cutoff)
plot.diagnostics <-  function(model, train) {
  plot(model)
  
  residuals = resid(model) # Plotted above in plot(lm.out)
  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Studentized residuals vs. fitted values
  plot(predict(model,train),r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(0, 0)
  
  # Standardized residuals vs. fitted values, with +/-2 reference bands
  plot(predict(model, train),r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)
  
  # Histogram
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Create range of x-values for normal curve
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)

  # Generate values from the normal distribution at the specified values
  yfit <- (dnorm(xfit))

  # Add the normal curve
  lines(xfit, yfit, ylim=c(0,0.5))
  
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  
  # Leverage plot
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's Distance, with the conventional 4/n and 1.0 cutoff lines
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(4/nrow(train),0)
  abline(1,0)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

# function to set up random seeds
# with small changes from:
# http://jaehyeon-kim.github.io/2015/05/Setup-Random-Seeds-on-Caret-Package.html 
#
# Builds the `seeds` list expected by caret::trainControl(): one integer
# vector of length (numbers + tunes) per resample, plus one final
# single-integer seed for the complete model fit.
#
# method  : resampling method, "cv" or "repeatedcv" (anything else -> NULL)
# numbers : number of folds
# repeats : number of repeats (used for "repeatedcv" only)
# tunes   : tuning-grid length, if any
# seed    : master seed for reproducibility
# Returns the list of seeds, or NULL for unsupported methods.
setCaretSeeds <- function(method = "cv", numbers = 1, repeats = 1, tunes = NULL, seed = 1701) {
  #B is the number of resamples and integer vector of M (numbers + tune length if any)
  B <- if (method == "cv") numbers
  else if(method == "repeatedcv") numbers * repeats
  else NULL
  # BUG FIX: the original tested is.null(length) -- `length` is the base
  # function and never NULL, so the guard never fired and
  # vector(length = NULL) errored for unsupported methods.
  if(is.null(B)) {
    seeds <- NULL
  } else {
    set.seed(seed = seed)
    seeds <- vector(mode = "list", length = B)
    seeds <- lapply(seeds, function(x) sample.int(n = 1000000
                                                  , size = numbers + ifelse(is.null(tunes), 0, tunes)))
    # caret requires one extra seed for the final model fit
    seeds[[length(seeds) + 1]] <- sample.int(n = 1000000, size = 1)
  }
  # return seeds
  seeds
}

# Train a regression model via caret with sensible per-method defaults for
# subset selection (leapForward/leapBackward/leapSeq), LASSO (glmnet with
# subopt == 'LASSO') and LARS; prints/plots CV metrics, residual diagnostics
# and final coefficients, and returns a list of the fitted pieces (the
# elements differ per method -- see the return() calls below).
#
# formula        : model formula (full formula; the method selects a subset)
# data           : training data.frame
# method         : caret method name
# subopt         : sub-option, e.g. 'LASSO' for glmnet
# feature.names  : predictor names (sets the nvmax grid for leap methods)
# train.control  : optional caret::trainControl; defaults to seeded 10-fold CV
# tune.grid      : optional tuning grid; method-appropriate default when NULL
# pre.proc       : optional preprocessing (forced to center/scale for lars)
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  if(is.null(train.control)){
    # Default: 10-fold CV with pre-generated seeds so runs stay reproducible
    # even with parallel workers
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,seeds = setCaretSeeds(method = "cv"
                                                     , numbers = 10
                                                     , seed = 1701)
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  if(is.null(tune.grid)){
    # Build a default tuning grid appropriate for the chosen method
    if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    if (method == 'glmnet' && subopt == 'LASSO'){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1)  # alpha = 1 is pure LASSO in glmnet
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
  registerDoParallel(cl)

  set.seed(1) 
  # note that the seed has to actually be set just before this function is called;
  # setting it here alone did not ensure reproducibility for some reason
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # register sequential engine in case you are not using this function anymore
  
  if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    print("All models results")
    print(model.caret$results) # all model results
    print("Best Model")
    print(model.caret$bestTune) # best model
    
    model = model.caret$finalModel
    

    # Metrics Plot: MAE/RMSE/Rsquared as a function of subset size (nvmax)
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-nvmax) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    # leap function does not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    id = rownames(model.caret$bestTune)    
    # Provides the coefficients of the best model
    # regsubsets does not return a full model (see regsubsets docs), so we
    # refit an lm on the selected terms to obtain coefficient intervals
    # https://stackoverflow.com/questions/13063762/how-to-obtain-a-lm-object-from-regsubsets
    print("Coefficients of final model:")
    coefs <- coef(model, id=id)
    #calculate the model to the the coef intervals
    nams <- names(coefs)
    nams <- nams[!nams %in% "(Intercept)"]
    response <-  as.character(formula[[2]])
    form <- as.formula(paste(response, paste(nams, collapse = " + "), sep = " ~ "))
    mod <- lm(form, data = data)
    #coefs
    #coef(mod)
    print(car::Confint(mod))
    return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram
                ,modelLM=mod))
  }
  if (method == 'glmnet' && subopt == 'LASSO'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    print(model.caret$results)
    model=model.caret$finalModel
    # Metrics Plot: MAE/RMSE/Rsquared as a function of lambda
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-lambda) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot 
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') +
      theme_light()
    plot(residHistogram)
    
    print("Coefficients") 
    #no interval for glmnet https://stackoverflow.com/questions/39750965/confidence-intervals-for-ridge-regression
    
    # Report only the non-zero coefficients at the selected lambda
    t=coef(model,s=model.caret$bestTune$lambda)
    model.coef = t[which(t[,1]!=0),]
    print(as.data.frame(model.coef))
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
  }
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot: MAE/RMSE/Rsquared as a function of the L1 fraction
    dataPlot = model.caret$results %>%
        gather(key='metric',value='value',-fraction) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    print("Coefficients") 
    # Non-zero coefficients at the selected fraction of the L1 path
    t=coef(model.caret$finalModel,s=model.caret$bestTune$fraction,mode='fraction')
    model.coef = t[which(t!=0)]
    print(model.coef)
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# Predict method for leaps::regsubsets fits (which ship no predict()).
# `formula` is passed explicitly because object$call[[2]] only holds the
# symbol "formula", not its value. Builds the design matrix for `newdata`
# and multiplies by the coefficients of the size-`id` model.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    design <- model.matrix(formula, newdata)  # adds intercept, expands interactions
    beta <- coef(object, id = id)
    selected <- names(beta)
    design[, selected] %*% beta
}
  
# Evaluate a fitted model on a held-out test set: prints a summary of the
# predictions and the test MSE, then plots predicted vs. actual with
# "good" (green) and "ok" (blue) relative tolerance bands around y = x.
#
# model          : fitted model object
# test           : test data.frame
# level          : confidence level for lm-style predictions
# draw.limits    : currently unused -- kept for interface compatibility
# good, ok       : relative tolerances for the green/blue reference lines
# method, subopt : caret method name / sub-option (NULL for plain lm-style)
# id             : model size for regsubsets predictions
# formula        : full formula (needed by predict.regsubsets)
# feature.names, label.names : column names in `test`
# transformation : bestNormalize object used to invert norm.pred labels
#
# NOTE(review): reads the globals log.pred / norm.pred set at the top of
# the report -- confirm they are defined before calling.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names
                      ,transformation = NULL){
  ## if using caret for glm select equivalent functionality, 
  ## need to pass formula (full is ok as it will select subset of variables from there)

  # BUG FIX: the original used independent `if` statements, so with
  # method = NULL the later `method == '...'` comparisons evaluated to
  # logical(0) and `if` failed with "argument is of length zero".
  # identical() likewise guards the subopt comparison against NULL.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level)
  } else if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && identical(subopt, 'LASSO')){
    xtest = as.matrix(test[,feature.names])
    pred = as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred = as.data.frame(predict(model, newdata = test))
  } else {
    # Previously an unknown method fell through to "object 'pred' not found"
    stop("test.model: unsupported method '", method, "'", call. = FALSE)
  }

  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))

  if(log.pred == TRUE || norm.pred == TRUE){
    # plot transformed comparison first
    plot(test[,label.names],pred[,1],xlab = "Actual (Transformed)", ylab = "Predicted (Transformed)")
  }

  # Back-transform to the original label scale for the final comparison plot
  if (log.pred == FALSE && norm.pred == FALSE){
    x = test[,label.names]
    y = pred[,1]
  }
  if (log.pred == TRUE){
    x = 10^test[,label.names]
    y = 10^pred[,1]
  }
  if (norm.pred == TRUE){
    x = predict(transformation, test[,label.names], inverse = TRUE)
    y = predict(transformation, pred[,1], inverse = TRUE)
  }

  plot(x, y, xlab = "Actual", ylab = "Predicted")
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
  
}

Setup Formulae

# Full modeling formula: label column(s) ~ every other column of data.train
n <- names(data.train)
 formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~", paste(n[!n %in% label.names], collapse = " + "))) 

# ind.interact = c("x4","x7","x8", "x9", "x10", "x11", "x14", "x16", "x17", "x21", "sqrt.x18")
# ind.nointeract = c("stat13", "stat14", "stat24", "stat60", "stat98", "stat110", "stat144", "stat149")
# 
# interact = paste(ind.interact, collapse = " + ")
# nointeract = paste(ind.nointeract, collapse = " + ")
# 
# # ^2 is 2 way interaction, ^3 is 3 way interaction
# formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " )^2 ", " + ", nointeract ))
# 
# # # * is all way interaction
# # formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " ) ", " + ", nointeract ))

# Intercept-only (grand-mean) formula, kept as a baseline model
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))

print(formula)
## log.y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + 
##     x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 + 
##     x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 + 
##     stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 + 
##     stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 + 
##     stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 + 
##     stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 + 
##     stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 + 
##     stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 + 
##     stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 + 
##     stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 + 
##     stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 + 
##     stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 + 
##     stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 + 
##     stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 + 
##     stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 + 
##     stat99 + stat100 + stat101 + stat102 + stat103 + stat104 + 
##     stat105 + stat106 + stat107 + stat108 + stat109 + stat110 + 
##     stat111 + stat112 + stat113 + stat114 + stat115 + stat116 + 
##     stat117 + stat118 + stat119 + stat120 + stat121 + stat122 + 
##     stat123 + stat124 + stat125 + stat126 + stat127 + stat128 + 
##     stat129 + stat130 + stat131 + stat132 + stat133 + stat134 + 
##     stat135 + stat136 + stat137 + stat138 + stat139 + stat140 + 
##     stat141 + stat142 + stat143 + stat144 + stat145 + stat146 + 
##     stat147 + stat148 + stat149 + stat150 + stat151 + stat152 + 
##     stat153 + stat154 + stat155 + stat156 + stat157 + stat158 + 
##     stat159 + stat160 + stat161 + stat162 + stat163 + stat164 + 
##     stat165 + stat166 + stat167 + stat168 + stat169 + stat170 + 
##     stat171 + stat172 + stat173 + stat174 + stat175 + stat176 + 
##     stat177 + stat178 + stat179 + stat180 + stat181 + stat182 + 
##     stat183 + stat184 + stat185 + stat186 + stat187 + stat188 + 
##     stat189 + stat190 + stat191 + stat192 + stat193 + stat194 + 
##     stat195 + stat196 + stat197 + stat198 + stat199 + stat200 + 
##     stat201 + stat202 + stat203 + stat204 + stat205 + stat206 + 
##     stat207 + stat208 + stat209 + stat210 + stat211 + stat212 + 
##     stat213 + stat214 + stat215 + stat216 + stat217 + sqrt.x18
print(grand.mean.formula)
## log.y3 ~ 1
# Refresh feature.names from the current training columns: some features may
# have been transformed/renamed upstream (e.g. sqrt.x18), so recompute the
# feature list as "every column that is not a label column".
feature.names = n[!n %in% label.names]

Full Model

# Fit ordinary least squares on the full feature set and show the fit summary.
model.full <- lm(formula, data = data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.077993 -0.020451 -0.004536  0.016021  0.162395 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  1.971e+00  9.493e-03 207.622  < 2e-16 ***
## x1           3.898e-04  6.490e-04   0.601 0.548073    
## x2          -6.039e-05  4.184e-04  -0.144 0.885256    
## x3           8.722e-05  1.138e-04   0.766 0.443471    
## x4          -5.227e-05  9.040e-06  -5.782 7.82e-09 ***
## x5           4.870e-04  2.953e-04   1.649 0.099170 .  
## x6           3.630e-04  5.952e-04   0.610 0.541941    
## x7           1.134e-02  6.378e-04  17.775  < 2e-16 ***
## x8           4.228e-04  1.482e-04   2.854 0.004333 ** 
## x9           3.315e-03  3.293e-04  10.066  < 2e-16 ***
## x10          1.110e-03  3.087e-04   3.595 0.000327 ***
## x11          1.630e+05  7.377e+04   2.210 0.027156 *  
## x12         -2.523e-04  1.881e-04  -1.342 0.179750    
## x13          7.875e-05  7.507e-05   1.049 0.294185    
## x14         -3.995e-04  3.213e-04  -1.243 0.213796    
## x15          8.943e-05  3.083e-04   0.290 0.771799    
## x16          8.688e-04  2.139e-04   4.061 4.95e-05 ***
## x17          1.640e-03  3.277e-04   5.006 5.73e-07 ***
## x19          9.494e-05  1.649e-04   0.576 0.564791    
## x20         -1.081e-03  1.153e-03  -0.937 0.348594    
## x21          1.298e-04  4.197e-05   3.093 0.001993 ** 
## x22         -3.259e-04  3.438e-04  -0.948 0.343175    
## x23         -6.680e-05  3.263e-04  -0.205 0.837829    
## stat1       -2.113e-04  2.481e-04  -0.851 0.394546    
## stat2       -7.467e-05  2.465e-04  -0.303 0.761968    
## stat3        1.597e-04  2.476e-04   0.645 0.518921    
## stat4       -4.713e-04  2.482e-04  -1.899 0.057603 .  
## stat5       -2.015e-04  2.490e-04  -0.809 0.418337    
## stat6       -3.109e-04  2.483e-04  -1.252 0.210525    
## stat7       -8.818e-05  2.461e-04  -0.358 0.720107    
## stat8        4.219e-04  2.478e-04   1.702 0.088762 .  
## stat9        3.429e-05  2.473e-04   0.139 0.889737    
## stat10      -1.693e-04  2.467e-04  -0.686 0.492559    
## stat11      -5.944e-04  2.513e-04  -2.366 0.018030 *  
## stat12       3.653e-04  2.474e-04   1.477 0.139855    
## stat13      -4.860e-04  2.465e-04  -1.971 0.048724 *  
## stat14      -8.478e-04  2.479e-04  -3.420 0.000630 ***
## stat15      -2.676e-04  2.452e-04  -1.091 0.275124    
## stat16       6.758e-05  2.485e-04   0.272 0.785662    
## stat17       9.891e-05  2.459e-04   0.402 0.687548    
## stat18      -3.333e-04  2.474e-04  -1.347 0.177938    
## stat19       1.886e-04  2.457e-04   0.768 0.442679    
## stat20      -4.145e-04  2.479e-04  -1.672 0.094620 .  
## stat21      -1.084e-04  2.484e-04  -0.436 0.662571    
## stat22      -4.333e-04  2.484e-04  -1.744 0.081158 .  
## stat23       6.092e-04  2.463e-04   2.474 0.013403 *  
## stat24      -3.643e-04  2.477e-04  -1.470 0.141552    
## stat25      -4.362e-04  2.466e-04  -1.769 0.076888 .  
## stat26      -2.142e-04  2.471e-04  -0.867 0.386041    
## stat27       1.381e-04  2.471e-04   0.559 0.576203    
## stat28       1.178e-05  2.481e-04   0.047 0.962124    
## stat29       4.691e-04  2.504e-04   1.873 0.061063 .  
## stat30       3.973e-04  2.495e-04   1.592 0.111413    
## stat31      -1.334e-04  2.508e-04  -0.532 0.594874    
## stat32      -9.001e-05  2.498e-04  -0.360 0.718642    
## stat33      -2.402e-04  2.468e-04  -0.973 0.330400    
## stat34       2.238e-04  2.463e-04   0.909 0.363491    
## stat35      -1.973e-04  2.499e-04  -0.789 0.429906    
## stat36      -1.917e-04  2.470e-04  -0.776 0.437649    
## stat37       1.780e-06  2.480e-04   0.007 0.994274    
## stat38       2.470e-04  2.476e-04   0.998 0.318510    
## stat39      -2.595e-04  2.471e-04  -1.050 0.293792    
## stat40       2.088e-04  2.480e-04   0.842 0.399788    
## stat41      -4.855e-04  2.451e-04  -1.981 0.047638 *  
## stat42      -1.486e-04  2.464e-04  -0.603 0.546469    
## stat43      -1.828e-04  2.487e-04  -0.735 0.462451    
## stat44       2.138e-04  2.479e-04   0.862 0.388577    
## stat45      -1.997e-04  2.470e-04  -0.809 0.418814    
## stat46       4.219e-04  2.469e-04   1.709 0.087591 .  
## stat47       8.154e-05  2.491e-04   0.327 0.743482    
## stat48       5.522e-04  2.471e-04   2.234 0.025506 *  
## stat49       1.220e-04  2.467e-04   0.495 0.620902    
## stat50       2.630e-04  2.453e-04   1.072 0.283685    
## stat51       3.267e-04  2.488e-04   1.313 0.189169    
## stat52      -6.985e-06  2.474e-04  -0.028 0.977476    
## stat53      -2.579e-04  2.495e-04  -1.034 0.301171    
## stat54      -3.376e-04  2.494e-04  -1.354 0.175928    
## stat55       3.675e-04  2.453e-04   1.498 0.134126    
## stat56      -2.105e-04  2.488e-04  -0.846 0.397413    
## stat57      -6.878e-05  2.443e-04  -0.282 0.778295    
## stat58       4.803e-06  2.455e-04   0.020 0.984391    
## stat59       3.009e-04  2.475e-04   1.216 0.224057    
## stat60       3.978e-04  2.477e-04   1.606 0.108326    
## stat61      -1.098e-04  2.491e-04  -0.441 0.659253    
## stat62      -2.184e-04  2.468e-04  -0.885 0.376304    
## stat63       2.496e-04  2.486e-04   1.004 0.315282    
## stat64      -1.323e-04  2.466e-04  -0.536 0.591739    
## stat65      -3.377e-04  2.483e-04  -1.360 0.173930    
## stat66       2.018e-04  2.502e-04   0.807 0.419930    
## stat67       1.570e-04  2.489e-04   0.631 0.528122    
## stat68      -1.083e-04  2.491e-04  -0.435 0.663790    
## stat69       1.639e-05  2.478e-04   0.066 0.947267    
## stat70       2.107e-04  2.473e-04   0.852 0.394189    
## stat71      -1.577e-04  2.458e-04  -0.641 0.521327    
## stat72       3.039e-04  2.487e-04   1.222 0.221914    
## stat73       3.869e-04  2.475e-04   1.563 0.118111    
## stat74      -1.561e-04  2.479e-04  -0.630 0.529033    
## stat75      -1.808e-04  2.488e-04  -0.727 0.467447    
## stat76      -3.316e-05  2.486e-04  -0.133 0.893892    
## stat77      -1.908e-04  2.474e-04  -0.771 0.440518    
## stat78      -4.877e-05  2.466e-04  -0.198 0.843239    
## stat79      -3.563e-04  2.488e-04  -1.432 0.152139    
## stat80       2.902e-05  2.491e-04   0.117 0.907240    
## stat81       2.602e-04  2.486e-04   1.047 0.295368    
## stat82       4.368e-04  2.455e-04   1.779 0.075308 .  
## stat83      -9.495e-05  2.464e-04  -0.385 0.699990    
## stat84      -1.553e-04  2.488e-04  -0.624 0.532657    
## stat85       1.263e-05  2.480e-04   0.051 0.959383    
## stat86       8.728e-05  2.483e-04   0.352 0.725199    
## stat87      -2.190e-04  2.481e-04  -0.883 0.377522    
## stat88      -9.664e-05  2.460e-04  -0.393 0.694486    
## stat89      -3.951e-04  2.468e-04  -1.601 0.109500    
## stat90      -2.243e-04  2.472e-04  -0.908 0.364137    
## stat91      -4.658e-04  2.455e-04  -1.897 0.057873 .  
## stat92      -5.847e-04  2.487e-04  -2.351 0.018737 *  
## stat93      -2.693e-04  2.505e-04  -1.075 0.282499    
## stat94      -2.923e-04  2.476e-04  -1.181 0.237758    
## stat95       6.861e-05  2.475e-04   0.277 0.781605    
## stat96      -4.230e-04  2.465e-04  -1.716 0.086221 .  
## stat97       1.722e-04  2.454e-04   0.702 0.482902    
## stat98       3.572e-03  2.439e-04  14.647  < 2e-16 ***
## stat99       2.941e-04  2.487e-04   1.183 0.236990    
## stat100      5.117e-04  2.474e-04   2.068 0.038646 *  
## stat101     -2.334e-04  2.494e-04  -0.936 0.349515    
## stat102      5.127e-05  2.492e-04   0.206 0.836960    
## stat103     -3.668e-04  2.495e-04  -1.470 0.141526    
## stat104     -3.421e-04  2.472e-04  -1.384 0.166477    
## stat105      2.775e-04  2.444e-04   1.135 0.256259    
## stat106     -2.607e-04  2.457e-04  -1.061 0.288727    
## stat107     -1.066e-04  2.476e-04  -0.431 0.666757    
## stat108     -6.346e-05  2.479e-04  -0.256 0.797934    
## stat109     -1.500e-04  2.457e-04  -0.611 0.541491    
## stat110     -3.475e-03  2.458e-04 -14.135  < 2e-16 ***
## stat111     -3.354e-04  2.485e-04  -1.349 0.177281    
## stat112      1.232e-05  2.481e-04   0.050 0.960401    
## stat113     -2.395e-04  2.498e-04  -0.959 0.337777    
## stat114      1.888e-04  2.472e-04   0.764 0.444945    
## stat115      1.714e-04  2.457e-04   0.698 0.485296    
## stat116      3.606e-04  2.490e-04   1.448 0.147625    
## stat117      5.019e-05  2.513e-04   0.200 0.841721    
## stat118     -2.486e-04  2.451e-04  -1.014 0.310510    
## stat119      1.057e-05  2.468e-04   0.043 0.965827    
## stat120      1.596e-04  2.457e-04   0.650 0.515876    
## stat121     -1.399e-04  2.489e-04  -0.562 0.574143    
## stat122     -1.098e-04  2.459e-04  -0.447 0.655168    
## stat123     -1.476e-04  2.498e-04  -0.591 0.554627    
## stat124      1.274e-05  2.473e-04   0.052 0.958896    
## stat125     -1.298e-05  2.475e-04  -0.052 0.958188    
## stat126      3.810e-05  2.481e-04   0.154 0.877968    
## stat127     -1.587e-05  2.475e-04  -0.064 0.948883    
## stat128     -2.059e-04  2.481e-04  -0.830 0.406721    
## stat129      1.536e-04  2.468e-04   0.622 0.533740    
## stat130      1.779e-04  2.490e-04   0.714 0.474951    
## stat131      2.594e-04  2.481e-04   1.046 0.295732    
## stat132     -1.444e-04  2.464e-04  -0.586 0.557925    
## stat133      1.365e-04  2.461e-04   0.555 0.579118    
## stat134     -1.184e-04  2.469e-04  -0.480 0.631502    
## stat135     -4.986e-05  2.474e-04  -0.202 0.840245    
## stat136     -1.373e-04  2.471e-04  -0.556 0.578394    
## stat137      1.237e-04  2.464e-04   0.502 0.615629    
## stat138      3.275e-05  2.473e-04   0.132 0.894663    
## stat139      9.650e-05  2.496e-04   0.387 0.699031    
## stat140     -1.539e-04  2.457e-04  -0.627 0.530939    
## stat141      2.662e-04  2.466e-04   1.080 0.280398    
## stat142     -1.774e-04  2.493e-04  -0.711 0.476816    
## stat143      1.715e-04  2.466e-04   0.695 0.486847    
## stat144      4.855e-04  2.471e-04   1.965 0.049521 *  
## stat145     -8.771e-05  2.513e-04  -0.349 0.727087    
## stat146     -5.407e-04  2.492e-04  -2.169 0.030100 *  
## stat147     -4.407e-04  2.489e-04  -1.771 0.076659 .  
## stat148     -5.745e-04  2.445e-04  -2.350 0.018814 *  
## stat149     -4.387e-04  2.485e-04  -1.765 0.077550 .  
## stat150     -1.958e-06  2.479e-04  -0.008 0.993697    
## stat151     -1.289e-04  2.505e-04  -0.515 0.606810    
## stat152     -1.427e-04  2.472e-04  -0.577 0.563778    
## stat153     -6.371e-05  2.517e-04  -0.253 0.800187    
## stat154      1.368e-04  2.496e-04   0.548 0.583721    
## stat155     -8.694e-05  2.463e-04  -0.353 0.724109    
## stat156      4.361e-04  2.487e-04   1.754 0.079570 .  
## stat157     -3.113e-05  2.462e-04  -0.126 0.899387    
## stat158     -1.715e-04  2.510e-04  -0.683 0.494577    
## stat159     -2.759e-05  2.458e-04  -0.112 0.910629    
## stat160     -1.076e-04  2.482e-04  -0.433 0.664683    
## stat161      2.773e-04  2.484e-04   1.116 0.264260    
## stat162     -1.009e-04  2.458e-04  -0.411 0.681279    
## stat163     -3.573e-06  2.519e-04  -0.014 0.988687    
## stat164      3.256e-04  2.498e-04   1.303 0.192529    
## stat165     -2.327e-04  2.458e-04  -0.947 0.343672    
## stat166     -7.703e-05  2.447e-04  -0.315 0.752930    
## stat167     -1.323e-04  2.461e-04  -0.537 0.590975    
## stat168     -1.778e-04  2.470e-04  -0.720 0.471729    
## stat169     -3.007e-05  2.480e-04  -0.121 0.903512    
## stat170     -5.112e-04  2.475e-04  -2.065 0.038941 *  
## stat171     -3.069e-05  2.484e-04  -0.124 0.901672    
## stat172      2.361e-04  2.452e-04   0.963 0.335543    
## stat173     -1.503e-04  2.492e-04  -0.603 0.546336    
## stat174     -2.015e-05  2.485e-04  -0.081 0.935386    
## stat175     -4.096e-04  2.475e-04  -1.655 0.097992 .  
## stat176      1.598e-04  2.473e-04   0.646 0.518299    
## stat177     -2.160e-04  2.491e-04  -0.867 0.385882    
## stat178     -1.131e-04  2.508e-04  -0.451 0.652175    
## stat179      4.009e-05  2.474e-04   0.162 0.871251    
## stat180     -6.651e-05  2.454e-04  -0.271 0.786347    
## stat181      3.232e-04  2.472e-04   1.308 0.191088    
## stat182     -4.857e-05  2.489e-04  -0.195 0.845296    
## stat183      1.097e-04  2.463e-04   0.445 0.656191    
## stat184     -2.388e-05  2.488e-04  -0.096 0.923552    
## stat185     -8.766e-05  2.447e-04  -0.358 0.720166    
## stat186     -2.045e-04  2.490e-04  -0.822 0.411344    
## stat187     -3.207e-04  2.473e-04  -1.297 0.194781    
## stat188      1.041e-04  2.475e-04   0.421 0.674067    
## stat189      1.804e-05  2.480e-04   0.073 0.942016    
## stat190      1.179e-04  2.464e-04   0.478 0.632375    
## stat191     -2.030e-04  2.480e-04  -0.818 0.413115    
## stat192     -9.681e-05  2.498e-04  -0.388 0.698345    
## stat193      3.919e-05  2.494e-04   0.157 0.875144    
## stat194      1.263e-04  2.473e-04   0.511 0.609566    
## stat195      2.411e-04  2.463e-04   0.979 0.327727    
## stat196     -3.226e-05  2.522e-04  -0.128 0.898231    
## stat197      3.063e-04  2.452e-04   1.249 0.211628    
## stat198     -3.912e-04  2.478e-04  -1.579 0.114397    
## stat199      3.628e-04  2.467e-04   1.471 0.141452    
## stat200     -1.312e-04  2.446e-04  -0.536 0.591880    
## stat201     -8.278e-05  2.477e-04  -0.334 0.738227    
## stat202     -2.839e-04  2.505e-04  -1.134 0.257027    
## stat203      1.955e-04  2.467e-04   0.792 0.428156    
## stat204     -5.535e-04  2.455e-04  -2.254 0.024215 *  
## stat205     -1.589e-04  2.469e-04  -0.643 0.519952    
## stat206      7.577e-06  2.493e-04   0.030 0.975758    
## stat207      3.697e-04  2.486e-04   1.487 0.137053    
## stat208      5.733e-05  2.484e-04   0.231 0.817478    
## stat209     -9.654e-05  2.472e-04  -0.391 0.696103    
## stat210     -5.136e-05  2.478e-04  -0.207 0.835852    
## stat211     -1.589e-04  2.473e-04  -0.643 0.520462    
## stat212     -1.217e-05  2.463e-04  -0.049 0.960598    
## stat213     -1.310e-04  2.498e-04  -0.525 0.599860    
## stat214     -5.351e-04  2.484e-04  -2.154 0.031267 *  
## stat215     -1.934e-04  2.482e-04  -0.779 0.436077    
## stat216      5.637e-06  2.481e-04   0.023 0.981877    
## stat217      3.344e-04  2.494e-04   1.341 0.180122    
## sqrt.x18     2.646e-02  9.460e-04  27.973  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.03134 on 5343 degrees of freedom
## Multiple R-squared:  0.2763, Adjusted R-squared:  0.2438 
## F-statistic:   8.5 on 240 and 5343 DF,  p-value: < 2.2e-16
# Diagnostic plots for the full model; the return value (used below for the
# 4/n filter) appears to be a named vector of per-observation Cook's distances.
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 285"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking the model after removing high-influence points

# Flag observations whose Cook's distance exceeds the 4/n rule of thumb,
# drop them from the training set, and refit the full model without them.
high.cd <- names(cd.full[cd.full > 4 / nrow(data.train)])
data.train2 <- data.train[!(rownames(data.train) %in% high.cd), ]
model.full2 <- lm(formula, data = data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.057764 -0.017607 -0.002282  0.016174  0.072361 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  1.956e+00  7.813e-03 250.312  < 2e-16 ***
## x1           2.371e-04  5.337e-04   0.444 0.656905    
## x2          -8.321e-05  3.442e-04  -0.242 0.808977    
## x3           6.832e-05  9.321e-05   0.733 0.463616    
## x4          -6.026e-05  7.439e-06  -8.100 6.84e-16 ***
## x5           5.387e-04  2.421e-04   2.225 0.026097 *  
## x6          -1.336e-05  4.887e-04  -0.027 0.978186    
## x7           1.209e-02  5.236e-04  23.100  < 2e-16 ***
## x8           5.677e-04  1.219e-04   4.658 3.27e-06 ***
## x9           3.307e-03  2.700e-04  12.248  < 2e-16 ***
## x10          1.432e-03  2.539e-04   5.639 1.80e-08 ***
## x11          2.186e+05  6.071e+04   3.601 0.000320 ***
## x12         -9.626e-05  1.538e-04  -0.626 0.531470    
## x13          1.082e-04  6.180e-05   1.752 0.079914 .  
## x14         -3.101e-04  2.643e-04  -1.173 0.240742    
## x15          8.457e-05  2.529e-04   0.334 0.738055    
## x16          9.330e-04  1.756e-04   5.312 1.13e-07 ***
## x17          1.687e-03  2.692e-04   6.267 4.00e-10 ***
## x19          1.501e-04  1.355e-04   1.108 0.267749    
## x20         -1.045e-03  9.502e-04  -1.100 0.271599    
## x21          1.513e-04  3.448e-05   4.388 1.17e-05 ***
## x22         -5.067e-04  2.817e-04  -1.799 0.072071 .  
## x23          9.734e-05  2.683e-04   0.363 0.716803    
## stat1       -4.746e-04  2.037e-04  -2.330 0.019858 *  
## stat2       -9.529e-05  2.024e-04  -0.471 0.637817    
## stat3        3.794e-04  2.034e-04   1.865 0.062175 .  
## stat4       -6.793e-04  2.039e-04  -3.331 0.000872 ***
## stat5       -3.270e-04  2.047e-04  -1.597 0.110242    
## stat6       -3.697e-04  2.038e-04  -1.814 0.069745 .  
## stat7       -1.755e-04  2.017e-04  -0.870 0.384171    
## stat8        1.470e-04  2.032e-04   0.723 0.469473    
## stat9        4.775e-05  2.032e-04   0.235 0.814253    
## stat10      -4.665e-05  2.022e-04  -0.231 0.817514    
## stat11      -6.198e-04  2.064e-04  -3.003 0.002685 ** 
## stat12       2.150e-04  2.030e-04   1.059 0.289679    
## stat13      -3.696e-04  2.022e-04  -1.828 0.067677 .  
## stat14      -1.079e-03  2.034e-04  -5.302 1.19e-07 ***
## stat15      -4.649e-04  2.019e-04  -2.302 0.021368 *  
## stat16      -9.285e-05  2.035e-04  -0.456 0.648211    
## stat17       6.592e-05  2.022e-04   0.326 0.744363    
## stat18      -2.737e-04  2.028e-04  -1.350 0.177236    
## stat19       2.020e-04  2.022e-04   0.999 0.317647    
## stat20      -1.508e-04  2.036e-04  -0.740 0.459055    
## stat21      -2.198e-04  2.040e-04  -1.078 0.281271    
## stat22      -1.391e-04  2.036e-04  -0.683 0.494503    
## stat23       4.868e-04  2.028e-04   2.401 0.016405 *  
## stat24      -3.494e-04  2.034e-04  -1.718 0.085882 .  
## stat25      -2.654e-04  2.025e-04  -1.311 0.190086    
## stat26      -2.646e-04  2.034e-04  -1.301 0.193281    
## stat27       1.608e-05  2.034e-04   0.079 0.936987    
## stat28       3.620e-05  2.039e-04   0.178 0.859074    
## stat29       4.720e-04  2.057e-04   2.295 0.021795 *  
## stat30       2.939e-04  2.043e-04   1.439 0.150335    
## stat31      -1.054e-04  2.057e-04  -0.512 0.608552    
## stat32      -1.554e-05  2.055e-04  -0.076 0.939724    
## stat33      -1.829e-04  2.028e-04  -0.902 0.367136    
## stat34       3.613e-04  2.024e-04   1.786 0.074229 .  
## stat35      -2.456e-04  2.054e-04  -1.196 0.231875    
## stat36      -1.038e-04  2.034e-04  -0.511 0.609714    
## stat37       1.526e-04  2.040e-04   0.748 0.454333    
## stat38       2.791e-04  2.032e-04   1.373 0.169753    
## stat39      -2.599e-04  2.026e-04  -1.283 0.199676    
## stat40       1.155e-05  2.041e-04   0.057 0.954862    
## stat41      -4.344e-04  2.011e-04  -2.160 0.030845 *  
## stat42      -2.137e-04  2.025e-04  -1.055 0.291417    
## stat43      -1.822e-04  2.041e-04  -0.893 0.372128    
## stat44       3.264e-04  2.037e-04   1.603 0.109087    
## stat45      -2.707e-04  2.027e-04  -1.335 0.181862    
## stat46       2.665e-04  2.031e-04   1.312 0.189648    
## stat47       1.849e-04  2.044e-04   0.904 0.365820    
## stat48       5.162e-04  2.026e-04   2.548 0.010870 *  
## stat49      -1.036e-04  2.026e-04  -0.511 0.609220    
## stat50       3.400e-04  2.014e-04   1.688 0.091407 .  
## stat51       2.214e-04  2.043e-04   1.084 0.278350    
## stat52       1.752e-04  2.035e-04   0.861 0.389434    
## stat53      -3.295e-04  2.049e-04  -1.608 0.107829    
## stat54      -5.018e-04  2.049e-04  -2.449 0.014351 *  
## stat55       2.489e-04  2.017e-04   1.234 0.217281    
## stat56       1.299e-04  2.043e-04   0.636 0.524986    
## stat57      -1.748e-04  2.011e-04  -0.869 0.384739    
## stat58      -1.142e-04  2.013e-04  -0.567 0.570634    
## stat59       3.695e-04  2.032e-04   1.818 0.069052 .  
## stat60       4.674e-04  2.033e-04   2.299 0.021556 *  
## stat61      -1.751e-04  2.047e-04  -0.855 0.392371    
## stat62      -4.400e-04  2.026e-04  -2.171 0.029951 *  
## stat63       1.807e-04  2.043e-04   0.884 0.376476    
## stat64       2.459e-05  2.025e-04   0.121 0.903371    
## stat65      -8.369e-05  2.038e-04  -0.411 0.681381    
## stat66       1.854e-04  2.056e-04   0.902 0.367195    
## stat67       2.233e-04  2.046e-04   1.092 0.275062    
## stat68      -1.219e-04  2.047e-04  -0.595 0.551592    
## stat69      -3.090e-05  2.037e-04  -0.152 0.879468    
## stat70      -3.373e-05  2.033e-04  -0.166 0.868228    
## stat71      -1.051e-05  2.023e-04  -0.052 0.958584    
## stat72       2.143e-04  2.042e-04   1.049 0.294236    
## stat73       3.952e-04  2.034e-04   1.943 0.052129 .  
## stat74       1.365e-04  2.035e-04   0.671 0.502308    
## stat75       1.142e-04  2.043e-04   0.559 0.576009    
## stat76      -6.345e-05  2.040e-04  -0.311 0.755845    
## stat77       2.374e-05  2.035e-04   0.117 0.907110    
## stat78      -2.712e-04  2.019e-04  -1.343 0.179218    
## stat79      -2.017e-04  2.039e-04  -0.989 0.322586    
## stat80       4.108e-05  2.047e-04   0.201 0.840987    
## stat81       1.067e-04  2.044e-04   0.522 0.601584    
## stat82       2.137e-04  2.019e-04   1.059 0.289849    
## stat83       1.144e-05  2.019e-04   0.057 0.954805    
## stat84      -2.391e-04  2.046e-04  -1.169 0.242527    
## stat85      -1.580e-04  2.039e-04  -0.775 0.438429    
## stat86       1.469e-04  2.041e-04   0.720 0.471734    
## stat87      -6.711e-05  2.034e-04  -0.330 0.741469    
## stat88       3.071e-05  2.021e-04   0.152 0.879224    
## stat89      -1.290e-04  2.034e-04  -0.634 0.526082    
## stat90      -3.016e-04  2.030e-04  -1.486 0.137457    
## stat91      -4.318e-04  2.011e-04  -2.147 0.031827 *  
## stat92      -5.139e-04  2.041e-04  -2.518 0.011821 *  
## stat93      -3.257e-04  2.066e-04  -1.576 0.115023    
## stat94      -1.897e-04  2.033e-04  -0.933 0.350852    
## stat95       3.435e-04  2.037e-04   1.686 0.091824 .  
## stat96      -3.578e-04  2.026e-04  -1.766 0.077394 .  
## stat97       2.705e-04  2.016e-04   1.342 0.179664    
## stat98       3.381e-03  2.004e-04  16.875  < 2e-16 ***
## stat99       3.709e-04  2.041e-04   1.817 0.069233 .  
## stat100      5.426e-04  2.032e-04   2.670 0.007604 ** 
## stat101      7.108e-06  2.049e-04   0.035 0.972327    
## stat102     -2.074e-05  2.044e-04  -0.101 0.919183    
## stat103     -3.540e-04  2.045e-04  -1.731 0.083455 .  
## stat104     -2.328e-04  2.034e-04  -1.145 0.252435    
## stat105      1.779e-04  2.010e-04   0.885 0.376046    
## stat106     -2.580e-04  2.017e-04  -1.279 0.200889    
## stat107     -1.044e-04  2.034e-04  -0.513 0.607844    
## stat108     -1.685e-05  2.036e-04  -0.083 0.934052    
## stat109     -2.847e-04  2.019e-04  -1.410 0.158610    
## stat110     -3.415e-03  2.016e-04 -16.936  < 2e-16 ***
## stat111     -2.108e-04  2.037e-04  -1.035 0.300752    
## stat112      1.842e-05  2.041e-04   0.090 0.928092    
## stat113     -1.416e-04  2.051e-04  -0.690 0.489932    
## stat114      4.340e-04  2.030e-04   2.138 0.032574 *  
## stat115      2.710e-04  2.019e-04   1.343 0.179440    
## stat116      3.376e-04  2.044e-04   1.651 0.098761 .  
## stat117     -1.379e-05  2.058e-04  -0.067 0.946567    
## stat118     -2.965e-05  2.009e-04  -0.148 0.882642    
## stat119      1.510e-04  2.027e-04   0.745 0.456164    
## stat120      2.340e-05  2.020e-04   0.116 0.907776    
## stat121     -1.375e-04  2.043e-04  -0.673 0.500917    
## stat122     -3.375e-05  2.025e-04  -0.167 0.867598    
## stat123      3.391e-05  2.050e-04   0.165 0.868627    
## stat124      4.695e-05  2.030e-04   0.231 0.817108    
## stat125     -4.783e-06  2.037e-04  -0.023 0.981265    
## stat126      1.356e-04  2.040e-04   0.665 0.506176    
## stat127     -2.830e-05  2.030e-04  -0.139 0.889131    
## stat128     -4.328e-04  2.035e-04  -2.126 0.033522 *  
## stat129      1.127e-04  2.027e-04   0.556 0.578187    
## stat130      9.453e-05  2.043e-04   0.463 0.643563    
## stat131      2.450e-04  2.037e-04   1.203 0.229135    
## stat132     -2.661e-04  2.027e-04  -1.313 0.189178    
## stat133      3.402e-04  2.026e-04   1.680 0.093116 .  
## stat134     -7.848e-05  2.027e-04  -0.387 0.698703    
## stat135      1.039e-05  2.033e-04   0.051 0.959221    
## stat136     -3.993e-04  2.027e-04  -1.970 0.048877 *  
## stat137      2.493e-04  2.021e-04   1.233 0.217507    
## stat138      4.045e-05  2.034e-04   0.199 0.842352    
## stat139     -4.825e-05  2.048e-04  -0.236 0.813744    
## stat140     -2.081e-04  2.014e-04  -1.033 0.301531    
## stat141      4.193e-04  2.023e-04   2.073 0.038221 *  
## stat142      1.830e-05  2.049e-04   0.089 0.928834    
## stat143      1.457e-04  2.024e-04   0.720 0.471528    
## stat144      4.242e-04  2.028e-04   2.092 0.036477 *  
## stat145     -2.021e-04  2.065e-04  -0.979 0.327773    
## stat146     -5.488e-04  2.046e-04  -2.682 0.007336 ** 
## stat147     -3.803e-04  2.043e-04  -1.861 0.062753 .  
## stat148     -4.713e-04  2.012e-04  -2.342 0.019208 *  
## stat149     -5.666e-04  2.044e-04  -2.772 0.005594 ** 
## stat150     -1.613e-04  2.039e-04  -0.791 0.429042    
## stat151      1.679e-04  2.064e-04   0.814 0.415910    
## stat152     -9.459e-05  2.028e-04  -0.466 0.640965    
## stat153      2.774e-04  2.062e-04   1.345 0.178569    
## stat154      1.904e-04  2.053e-04   0.927 0.353729    
## stat155      1.785e-04  2.025e-04   0.882 0.377963    
## stat156      3.256e-04  2.037e-04   1.599 0.109928    
## stat157     -5.249e-05  2.019e-04  -0.260 0.794869    
## stat158      6.312e-05  2.060e-04   0.306 0.759316    
## stat159     -5.031e-05  2.020e-04  -0.249 0.803282    
## stat160      5.815e-05  2.043e-04   0.285 0.775937    
## stat161      2.265e-04  2.038e-04   1.112 0.266279    
## stat162     -8.879e-05  2.015e-04  -0.441 0.659426    
## stat163     -2.999e-05  2.077e-04  -0.144 0.885218    
## stat164     -1.263e-06  2.056e-04  -0.006 0.995100    
## stat165     -1.629e-04  2.020e-04  -0.806 0.420016    
## stat166     -1.085e-04  2.003e-04  -0.542 0.588145    
## stat167     -2.510e-04  2.021e-04  -1.242 0.214346    
## stat168     -1.608e-04  2.026e-04  -0.793 0.427659    
## stat169      1.117e-05  2.039e-04   0.055 0.956291    
## stat170     -4.501e-04  2.035e-04  -2.212 0.027037 *  
## stat171     -1.368e-04  2.039e-04  -0.671 0.502379    
## stat172      5.263e-04  2.009e-04   2.619 0.008844 ** 
## stat173     -4.421e-06  2.047e-04  -0.022 0.982765    
## stat174      1.315e-05  2.038e-04   0.065 0.948562    
## stat175     -3.334e-04  2.030e-04  -1.642 0.100687    
## stat176     -1.583e-04  2.030e-04  -0.780 0.435685    
## stat177     -4.875e-04  2.049e-04  -2.379 0.017379 *  
## stat178      2.950e-05  2.060e-04   0.143 0.886159    
## stat179      3.408e-05  2.033e-04   0.168 0.866852    
## stat180      8.391e-05  2.019e-04   0.416 0.677708    
## stat181      3.517e-04  2.027e-04   1.735 0.082784 .  
## stat182      7.996e-05  2.046e-04   0.391 0.695973    
## stat183      3.837e-05  2.026e-04   0.189 0.849818    
## stat184      2.248e-04  2.041e-04   1.101 0.270872    
## stat185      1.200e-04  2.010e-04   0.597 0.550418    
## stat186      6.982e-05  2.047e-04   0.341 0.733040    
## stat187     -2.043e-04  2.029e-04  -1.007 0.314204    
## stat188      3.081e-04  2.031e-04   1.517 0.129321    
## stat189      7.887e-05  2.035e-04   0.387 0.698425    
## stat190     -5.411e-05  2.028e-04  -0.267 0.789609    
## stat191     -1.799e-04  2.036e-04  -0.884 0.376970    
## stat192     -3.377e-05  2.055e-04  -0.164 0.869478    
## stat193      1.502e-04  2.052e-04   0.732 0.464204    
## stat194     -2.072e-05  2.035e-04  -0.102 0.918915    
## stat195      7.567e-05  2.023e-04   0.374 0.708417    
## stat196     -5.442e-05  2.069e-04  -0.263 0.792519    
## stat197      1.586e-04  2.019e-04   0.785 0.432234    
## stat198     -2.360e-04  2.035e-04  -1.160 0.246217    
## stat199      2.931e-04  2.024e-04   1.448 0.147682    
## stat200      5.755e-05  2.013e-04   0.286 0.775014    
## stat201      1.457e-05  2.042e-04   0.071 0.943129    
## stat202     -8.812e-05  2.055e-04  -0.429 0.668091    
## stat203      2.431e-04  2.026e-04   1.200 0.230366    
## stat204     -3.462e-04  2.017e-04  -1.716 0.086195 .  
## stat205      1.814e-04  2.023e-04   0.896 0.370164    
## stat206     -6.453e-05  2.047e-04  -0.315 0.752660    
## stat207      3.638e-04  2.039e-04   1.784 0.074438 .  
## stat208      1.682e-04  2.042e-04   0.824 0.410094    
## stat209      1.810e-05  2.028e-04   0.089 0.928870    
## stat210     -2.348e-04  2.034e-04  -1.154 0.248406    
## stat211     -1.837e-04  2.032e-04  -0.904 0.366096    
## stat212      2.195e-07  2.024e-04   0.001 0.999135    
## stat213     -7.592e-05  2.050e-04  -0.370 0.711115    
## stat214     -3.058e-04  2.041e-04  -1.498 0.134165    
## stat215     -1.710e-04  2.042e-04  -0.837 0.402531    
## stat216      4.235e-05  2.032e-04   0.208 0.834891    
## stat217      1.779e-04  2.048e-04   0.869 0.385150    
## sqrt.x18     2.671e-02  7.751e-04  34.463  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.02504 on 5058 degrees of freedom
## Multiple R-squared:  0.3845, Adjusted R-squared:  0.3553 
## F-statistic: 13.16 on 240 and 5058 DF,  p-value: < 2.2e-16
# Cook's-distance / residual diagnostics for the refit on the filtered training set
cd.full2 <- plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 282"
## [1] "Number of data points that have Cook's D > 1: 0"
# much more normal residuals than before. 
# Checking to see if distributions are different and, if so, which variables differ
# High Leverage Plot 
# Flag training rows previously identified as high-influence (Cook's D > 4/n,
# collected in `high.cd`) and compare the target's distribution between the
# flagged ("High") and remaining ("Normal") points.
plotData <- data.train %>%
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, target = one_of(label.names))

# Boxplot of the target by group; outliers hidden so the boxes stay readable.
# (Fixed the doubled space in the displayed title.)
ggplot(data = plotData, aes(x = type, y = target)) +
  geom_boxplot(fill = 'light blue', outlier.shape = NA) +
  scale_y_continuous(name = "Target Variable Values") +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Rebuild the High/Normal flag, this time keeping all predictors (not the target).
plotData <- data.train %>%
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, one_of(feature.names))

# Pooled-variance two-sample t-test per feature: does the feature's mean differ
# between high-influence and normal rows?
comp.test <- lapply(
  dplyr::select(plotData, one_of(feature.names)),
  function(x) t.test(x ~ plotData$type, var.equal = TRUE)
)

# Keep the nominally significant comparisons and print their p-values.
# vapply() is type-stable (always a named numeric vector), unlike sapply().
# NOTE(review): p-values are not adjusted for multiple testing — with ~240
# features some of these are expected false positives; consider p.adjust().
sig.comp <- list.filter(comp.test, p.value < 0.05)
vapply(sig.comp, function(x) x[['p.value']], numeric(1))
##           x8        stat8       stat22       stat47       stat70 
## 2.474226e-03 1.927080e-02 2.398968e-02 2.344401e-02 2.653913e-02 
##       stat74       stat82       stat95       stat98      stat110 
## 1.353240e-02 1.007109e-02 3.878055e-02 4.037782e-08 3.838879e-05 
##      stat128      stat145      stat200     sqrt.x18 
## 3.828398e-02 4.921478e-02 9.765306e-03 1.972101e-02
# Distribution (box) Plots
mm = melt(plotData, id=c('type'))

ggplot(mm) +
  geom_boxplot(aes(x=type, y=value))+
  facet_wrap(~variable, ncol=16, scales = 'free') +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# title = paste("comparison_trans",params$trans,'.jpeg', sep="")
# ggsave(title, width =100, height = 200, units='cm',limitsize = FALSE)

Grand Means Model

# Intercept-only (grand mean) baselines; these serve as the lower scope for the
# stepwise searches below — one for the full training set, one for the filtered.
model.null  <- lm(grand.mean.formula, data.train)
model.null2 <- lm(grand.mean.formula, data.train2)

Variable Selection

Basic: http://www.stat.columbia.edu/~martin/W2024/R10.pdf Cross Validation + Other Metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward stepwise selection on the full training set: grow from the grand-mean
# model toward the full model by AIC. trace = 0 suppresses per-step logging.
if (algo.forward == TRUE){
  t1 <- Sys.time()

  model.forward <- step(model.null,
                        scope = list(lower = model.null, upper = model.full),
                        direction = "forward", trace = 0)
  print(summary(model.forward))

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins); pasting `t2 - t1` directly
  # prints a bare number with unknown units.
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))

  plot.diagnostics(model.forward, data.train)
}

Test

# Hold-out evaluation of the forward-selected model (full-train fit).
if (algo.forward == TRUE){
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Forward stepwise selection repeated on the filtered training set
# (high-influence rows removed), bounded by the matching null/full models.
if (algo.forward == TRUE){
  t1 <- Sys.time()

  model.forward2 <- step(model.null2,
                         scope = list(lower = model.null2, upper = model.full2),
                         direction = "forward", trace = 0)
  print(summary(model.forward2))

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins); pasting `t2 - t1` directly
  # prints a bare number with unknown units.
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))

  plot.diagnostics(model.forward2, data.train2)
}

Test

# Hold-out evaluation of the forward-selected model fit on the filtered data.
if (algo.forward == TRUE){
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Cross-validated forward selection via caret's leapForward on the full
# training set. Seed is fixed so CV folds are reproducible.
if (algo.forward.caret == TRUE){
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapForward",
    feature.names = feature.names
  )
  # Keep the chosen model and its tuning id for the test chunk below.
  model.forward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 8 on full training set
## [1] "All models results"
##     nvmax       RMSE  Rsquared        MAE      RMSESD RsquaredSD
## 1       1 0.03397144 0.1129785 0.02646469 0.001366504 0.03258230
## 2       2 0.03314757 0.1546431 0.02580411 0.001274010 0.03550349
## 3       3 0.03253002 0.1853842 0.02520471 0.001137243 0.03345947
## 4       4 0.03198437 0.2123327 0.02446596 0.001217541 0.03861630
## 5       5 0.03168250 0.2267382 0.02423192 0.001186098 0.03411053
## 6       6 0.03158563 0.2313478 0.02414987 0.001141436 0.03245483
## 7       7 0.03151956 0.2344940 0.02412544 0.001171232 0.03109860
## 8       8 0.03147259 0.2368539 0.02410171 0.001168497 0.03058939
## 9       9 0.03150072 0.2354524 0.02413376 0.001181995 0.03129068
## 10     10 0.03148114 0.2363709 0.02412322 0.001194718 0.03116077
## 11     11 0.03149740 0.2356756 0.02413578 0.001213103 0.03187730
## 12     12 0.03148258 0.2364391 0.02413605 0.001244124 0.03330571
## 13     13 0.03150559 0.2353915 0.02416300 0.001253546 0.03181309
## 14     14 0.03152527 0.2344496 0.02418075 0.001240694 0.03226716
## 15     15 0.03157088 0.2322994 0.02420247 0.001241489 0.03132019
## 16     16 0.03160219 0.2308665 0.02423099 0.001248865 0.03165040
## 17     17 0.03159906 0.2310707 0.02423745 0.001246188 0.03184010
## 18     18 0.03159258 0.2314126 0.02424362 0.001253750 0.03238154
## 19     19 0.03162018 0.2301391 0.02426857 0.001271698 0.03311663
## 20     20 0.03162013 0.2301321 0.02426828 0.001261502 0.03335763
## 21     21 0.03163933 0.2293298 0.02428201 0.001267756 0.03427661
## 22     22 0.03166012 0.2284369 0.02429724 0.001264940 0.03482626
## 23     23 0.03166871 0.2280750 0.02430038 0.001255371 0.03479053
## 24     24 0.03167080 0.2279897 0.02431120 0.001246404 0.03537236
## 25     25 0.03167479 0.2278325 0.02431778 0.001243811 0.03507049
## 26     26 0.03169081 0.2271879 0.02433412 0.001250790 0.03517476
## 27     27 0.03169729 0.2268934 0.02433913 0.001249120 0.03486601
## 28     28 0.03170789 0.2264480 0.02433987 0.001264853 0.03487480
## 29     29 0.03171902 0.2259806 0.02435463 0.001271960 0.03444101
## 30     30 0.03173570 0.2252595 0.02437035 0.001266889 0.03432566
## 31     31 0.03173195 0.2254884 0.02436233 0.001260558 0.03454664
## 32     32 0.03173178 0.2255218 0.02435047 0.001268830 0.03454675
## 33     33 0.03173877 0.2252352 0.02435169 0.001268531 0.03411688
## 34     34 0.03173708 0.2253553 0.02434847 0.001266621 0.03410909
## 35     35 0.03172574 0.2258877 0.02433224 0.001267765 0.03402079
## 36     36 0.03172761 0.2257602 0.02433485 0.001231846 0.03343310
## 37     37 0.03173692 0.2253955 0.02433942 0.001230110 0.03293598
## 38     38 0.03175286 0.2247552 0.02434568 0.001223433 0.03278753
## 39     39 0.03177022 0.2240014 0.02435400 0.001209248 0.03260851
## 40     40 0.03176735 0.2241423 0.02435055 0.001214058 0.03213387
## 41     41 0.03175933 0.2245519 0.02434048 0.001215681 0.03279103
## 42     42 0.03176760 0.2242222 0.02435003 0.001210646 0.03304433
## 43     43 0.03178174 0.2236632 0.02436094 0.001223114 0.03245176
## 44     44 0.03176548 0.2244880 0.02434290 0.001236659 0.03287807
## 45     45 0.03175557 0.2249347 0.02434567 0.001227385 0.03214414
## 46     46 0.03175276 0.2250968 0.02435355 0.001228814 0.03255196
## 47     47 0.03174658 0.2253998 0.02435143 0.001227061 0.03291523
## 48     48 0.03174952 0.2252613 0.02435073 0.001215199 0.03212782
## 49     49 0.03176188 0.2247537 0.02436333 0.001220121 0.03241778
## 50     50 0.03175802 0.2249795 0.02435938 0.001218192 0.03212667
## 51     51 0.03177215 0.2243573 0.02436742 0.001221410 0.03241665
## 52     52 0.03176169 0.2248276 0.02436185 0.001209841 0.03250546
## 53     53 0.03175587 0.2250874 0.02435256 0.001203791 0.03197015
## 54     54 0.03174892 0.2254078 0.02435095 0.001206342 0.03182926
## 55     55 0.03177278 0.2243797 0.02437285 0.001204053 0.03238485
## 56     56 0.03176532 0.2247015 0.02436826 0.001205701 0.03254015
## 57     57 0.03177561 0.2243352 0.02437926 0.001226354 0.03283168
## 58     58 0.03177402 0.2244536 0.02438196 0.001242801 0.03333557
## 59     59 0.03178475 0.2239762 0.02437652 0.001247321 0.03354641
## 60     60 0.03177720 0.2243274 0.02436874 0.001251324 0.03361974
## 61     61 0.03178499 0.2239599 0.02437502 0.001244960 0.03394108
## 62     62 0.03180002 0.2232480 0.02438978 0.001237333 0.03365363
## 63     63 0.03179108 0.2236996 0.02438410 0.001235990 0.03369295
## 64     64 0.03179740 0.2234424 0.02439177 0.001241794 0.03403666
## 65     65 0.03179541 0.2236054 0.02439430 0.001246121 0.03396912
## 66     66 0.03180732 0.2231539 0.02439612 0.001258378 0.03408516
## 67     67 0.03182623 0.2222991 0.02440973 0.001258971 0.03432724
## 68     68 0.03183780 0.2217879 0.02442722 0.001261011 0.03405334
## 69     69 0.03184551 0.2214856 0.02442750 0.001267891 0.03440411
## 70     70 0.03184630 0.2215087 0.02442906 0.001274304 0.03478679
## 71     71 0.03184337 0.2216889 0.02442114 0.001281635 0.03486202
## 72     72 0.03183684 0.2220184 0.02441515 0.001281394 0.03481506
## 73     73 0.03183694 0.2220143 0.02441533 0.001272533 0.03466556
## 74     74 0.03184925 0.2215176 0.02441674 0.001272218 0.03458641
## 75     75 0.03186333 0.2209546 0.02441895 0.001279156 0.03480742
## 76     76 0.03187450 0.2204623 0.02442841 0.001278938 0.03449780
## 77     77 0.03188238 0.2201637 0.02442778 0.001279986 0.03497811
## 78     78 0.03190333 0.2192745 0.02444284 0.001286165 0.03469368
## 79     79 0.03191002 0.2190375 0.02445361 0.001288465 0.03495776
## 80     80 0.03192131 0.2185948 0.02446153 0.001302981 0.03513404
## 81     81 0.03192621 0.2184263 0.02446874 0.001308061 0.03539521
## 82     82 0.03193266 0.2181648 0.02447590 0.001317219 0.03536867
## 83     83 0.03193085 0.2182449 0.02447338 0.001316321 0.03532296
## 84     84 0.03194028 0.2178353 0.02448064 0.001316106 0.03552792
## 85     85 0.03194180 0.2177406 0.02447670 0.001312138 0.03486352
## 86     86 0.03193725 0.2179198 0.02446973 0.001305932 0.03461076
## 87     87 0.03193065 0.2182142 0.02446107 0.001308271 0.03471609
## 88     88 0.03193170 0.2181732 0.02445894 0.001315325 0.03443012
## 89     89 0.03193626 0.2180138 0.02446334 0.001318447 0.03434676
## 90     90 0.03194121 0.2178273 0.02446877 0.001314081 0.03437844
## 91     91 0.03195225 0.2174089 0.02448003 0.001313453 0.03438395
## 92     92 0.03196800 0.2167002 0.02450098 0.001301897 0.03438135
## 93     93 0.03197321 0.2164979 0.02450875 0.001293551 0.03448274
## 94     94 0.03197243 0.2165415 0.02450040 0.001286236 0.03419468
## 95     95 0.03198100 0.2161543 0.02450311 0.001283047 0.03416721
## 96     96 0.03198365 0.2160224 0.02451247 0.001285981 0.03440372
## 97     97 0.03198720 0.2158613 0.02451704 0.001289484 0.03467673
## 98     98 0.03199184 0.2157313 0.02451882 0.001297125 0.03496421
## 99     99 0.03199006 0.2158198 0.02451945 0.001297250 0.03504391
## 100   100 0.03200253 0.2152829 0.02453151 0.001299540 0.03497747
## 101   101 0.03200141 0.2153531 0.02453575 0.001304584 0.03537765
## 102   102 0.03201229 0.2149104 0.02454520 0.001300662 0.03551619
## 103   103 0.03201540 0.2147882 0.02453950 0.001295391 0.03505053
## 104   104 0.03201099 0.2149694 0.02453090 0.001293181 0.03511864
## 105   105 0.03201176 0.2149700 0.02452685 0.001293906 0.03506939
## 106   106 0.03200785 0.2151649 0.02452128 0.001289098 0.03469746
## 107   107 0.03202238 0.2145488 0.02453230 0.001283446 0.03449773
## 108   108 0.03202533 0.2144316 0.02453916 0.001289666 0.03477391
## 109   109 0.03202846 0.2142981 0.02453740 0.001286473 0.03468906
## 110   110 0.03204119 0.2137672 0.02455245 0.001287605 0.03488803
## 111   111 0.03204056 0.2138057 0.02454602 0.001293286 0.03523541
## 112   112 0.03205242 0.2132791 0.02455613 0.001291270 0.03533173
## 113   113 0.03205435 0.2131939 0.02455707 0.001288004 0.03545524
## 114   114 0.03205991 0.2129462 0.02456725 0.001292652 0.03548638
## 115   115 0.03205438 0.2131959 0.02456160 0.001290048 0.03558039
## 116   116 0.03205650 0.2131061 0.02456430 0.001293164 0.03579642
## 117   117 0.03205496 0.2131835 0.02456740 0.001293939 0.03570251
## 118   118 0.03205403 0.2132420 0.02457067 0.001289293 0.03594683
## 119   119 0.03205130 0.2133866 0.02456940 0.001279608 0.03587343
## 120   120 0.03205178 0.2133855 0.02457048 0.001285074 0.03600407
## 121   121 0.03204444 0.2136766 0.02456486 0.001285329 0.03561639
## 122   122 0.03204305 0.2137514 0.02456739 0.001283484 0.03568345
## 123   123 0.03203944 0.2138842 0.02456360 0.001287672 0.03552027
## 124   124 0.03204487 0.2136660 0.02455905 0.001293804 0.03551185
## 125   125 0.03204133 0.2138291 0.02455599 0.001288710 0.03545408
## 126   126 0.03204446 0.2136940 0.02456414 0.001293287 0.03515529
## 127   127 0.03204949 0.2135012 0.02456821 0.001298904 0.03549780
## 128   128 0.03205275 0.2133728 0.02457444 0.001307631 0.03580807
## 129   129 0.03205752 0.2131887 0.02457823 0.001305413 0.03588362
## 130   130 0.03205168 0.2134671 0.02457551 0.001305950 0.03602731
## 131   131 0.03205578 0.2132881 0.02457457 0.001304710 0.03593469
## 132   132 0.03205956 0.2131318 0.02457283 0.001295080 0.03561789
## 133   133 0.03205874 0.2131661 0.02457019 0.001291950 0.03533858
## 134   134 0.03206638 0.2128481 0.02457349 0.001289012 0.03542598
## 135   135 0.03207091 0.2126764 0.02457507 0.001286413 0.03542002
## 136   136 0.03207919 0.2123517 0.02457962 0.001293855 0.03551932
## 137   137 0.03208601 0.2120720 0.02458653 0.001286708 0.03529486
## 138   138 0.03208980 0.2118855 0.02458915 0.001284034 0.03505935
## 139   139 0.03209093 0.2118676 0.02458748 0.001286125 0.03513304
## 140   140 0.03209384 0.2117487 0.02458741 0.001288905 0.03530224
## 141   141 0.03209937 0.2115237 0.02458814 0.001292393 0.03524343
## 142   142 0.03209846 0.2115525 0.02458700 0.001289407 0.03511773
## 143   143 0.03209934 0.2115189 0.02458791 0.001290353 0.03512672
## 144   144 0.03210438 0.2112854 0.02459210 0.001287172 0.03490769
## 145   145 0.03210386 0.2113364 0.02459512 0.001289128 0.03507749
## 146   146 0.03210377 0.2113455 0.02459709 0.001290369 0.03520337
## 147   147 0.03210217 0.2114494 0.02459616 0.001295362 0.03565118
## 148   148 0.03211042 0.2111009 0.02460296 0.001291023 0.03570741
## 149   149 0.03210918 0.2111543 0.02459978 0.001292304 0.03569529
## 150   150 0.03211233 0.2110155 0.02460135 0.001296029 0.03586168
## 151   151 0.03211043 0.2111258 0.02460285 0.001299249 0.03582659
## 152   152 0.03210892 0.2111951 0.02460242 0.001296656 0.03581763
## 153   153 0.03211612 0.2108914 0.02461005 0.001289386 0.03563780
## 154   154 0.03211704 0.2108744 0.02461025 0.001287566 0.03568467
## 155   155 0.03211938 0.2107973 0.02460884 0.001285243 0.03564241
## 156   156 0.03211834 0.2108685 0.02460685 0.001283670 0.03558245
## 157   157 0.03211726 0.2109215 0.02460435 0.001289513 0.03582529
## 158   158 0.03212254 0.2107016 0.02460553 0.001287696 0.03588381
## 159   159 0.03212238 0.2107185 0.02460120 0.001286925 0.03596561
## 160   160 0.03212039 0.2107867 0.02459868 0.001280930 0.03583024
## 161   161 0.03211789 0.2109129 0.02459867 0.001282675 0.03577708
## 162   162 0.03211504 0.2110338 0.02459476 0.001282423 0.03579293
## 163   163 0.03211207 0.2111600 0.02459397 0.001279403 0.03586086
## 164   164 0.03211370 0.2111089 0.02459693 0.001282829 0.03600227
## 165   165 0.03211447 0.2110685 0.02459664 0.001275592 0.03568798
## 166   166 0.03211582 0.2110339 0.02459615 0.001276216 0.03576516
## 167   167 0.03211912 0.2108933 0.02460099 0.001275689 0.03590047
## 168   168 0.03211606 0.2110497 0.02459957 0.001275161 0.03606083
## 169   169 0.03211532 0.2110758 0.02460002 0.001274486 0.03604329
## 170   170 0.03211645 0.2110178 0.02460163 0.001274399 0.03602364
## 171   171 0.03211421 0.2111275 0.02459805 0.001278596 0.03625024
## 172   172 0.03212064 0.2108452 0.02460571 0.001274984 0.03608893
## 173   173 0.03212131 0.2108464 0.02460494 0.001276154 0.03621779
## 174   174 0.03212541 0.2106707 0.02460974 0.001277735 0.03623490
## 175   175 0.03212702 0.2106176 0.02460852 0.001281577 0.03624617
## 176   176 0.03212510 0.2107210 0.02460529 0.001283015 0.03614371
## 177   177 0.03212188 0.2108583 0.02460295 0.001282118 0.03603258
## 178   178 0.03212137 0.2108806 0.02460325 0.001283068 0.03609017
## 179   179 0.03211953 0.2109631 0.02460360 0.001280301 0.03620408
## 180   180 0.03211934 0.2109706 0.02460538 0.001278688 0.03620059
## 181   181 0.03211670 0.2110887 0.02460443 0.001279549 0.03624950
## 182   182 0.03211694 0.2110792 0.02460413 0.001280649 0.03623879
## 183   183 0.03211552 0.2111700 0.02460620 0.001284300 0.03639933
## 184   184 0.03211531 0.2111879 0.02460559 0.001285839 0.03652608
## 185   185 0.03211161 0.2113404 0.02460203 0.001284208 0.03663121
## 186   186 0.03210980 0.2114233 0.02460261 0.001282929 0.03658051
## 187   187 0.03210819 0.2114910 0.02460264 0.001278425 0.03663120
## 188   188 0.03210691 0.2115517 0.02460385 0.001277447 0.03669301
## 189   189 0.03210705 0.2115624 0.02460263 0.001277935 0.03672165
## 190   190 0.03210774 0.2115319 0.02460213 0.001278524 0.03675736
## 191   191 0.03210631 0.2115864 0.02460148 0.001274096 0.03657726
## 192   192 0.03210902 0.2114759 0.02460451 0.001274931 0.03644834
## 193   193 0.03211282 0.2113080 0.02460908 0.001273100 0.03636595
## 194   194 0.03211473 0.2112343 0.02461162 0.001274522 0.03640338
## 195   195 0.03211356 0.2112938 0.02461320 0.001276060 0.03650754
## 196   196 0.03211388 0.2112794 0.02461356 0.001273477 0.03645464
## 197   197 0.03211402 0.2112777 0.02461529 0.001274465 0.03644262
## 198   198 0.03211497 0.2112484 0.02461708 0.001273188 0.03637776
## 199   199 0.03211297 0.2113333 0.02461547 0.001274903 0.03650418
## 200   200 0.03210975 0.2114707 0.02461451 0.001273033 0.03643291
## 201   201 0.03211050 0.2114420 0.02461689 0.001273158 0.03636370
## 202   202 0.03211143 0.2114086 0.02461750 0.001276844 0.03645985
## 203   203 0.03211030 0.2114482 0.02461517 0.001276197 0.03651535
## 204   204 0.03211078 0.2114262 0.02461610 0.001275630 0.03650562
## 205   205 0.03210932 0.2114865 0.02461411 0.001276812 0.03654330
## 206   206 0.03210991 0.2114623 0.02461523 0.001276235 0.03649789
## 207   207 0.03211034 0.2114414 0.02461497 0.001278312 0.03655594
## 208   208 0.03211248 0.2113532 0.02461572 0.001277568 0.03656517
## 209   209 0.03211273 0.2113480 0.02461728 0.001276461 0.03652904
## 210   210 0.03211280 0.2113365 0.02461689 0.001272649 0.03632601
## 211   211 0.03211285 0.2113385 0.02461592 0.001273426 0.03637260
## 212   212 0.03211224 0.2113639 0.02461477 0.001273669 0.03630954
## 213   213 0.03211188 0.2113791 0.02461543 0.001272946 0.03635200
## 214   214 0.03211207 0.2113665 0.02461610 0.001274889 0.03639020
## 215   215 0.03211099 0.2114142 0.02461445 0.001275488 0.03638040
## 216   216 0.03211171 0.2113777 0.02461499 0.001273977 0.03630878
## 217   217 0.03211213 0.2113606 0.02461525 0.001274136 0.03628404
## 218   218 0.03211270 0.2113417 0.02461549 0.001274443 0.03630974
## 219   219 0.03211285 0.2113332 0.02461559 0.001273830 0.03628105
## 220   220 0.03211297 0.2113197 0.02461554 0.001273475 0.03629616
## 221   221 0.03211368 0.2112935 0.02461591 0.001275246 0.03632758
## 222   222 0.03211446 0.2112620 0.02461723 0.001276137 0.03637547
## 223   223 0.03211577 0.2112041 0.02461773 0.001275653 0.03636885
## 224   224 0.03211624 0.2111851 0.02461784 0.001275289 0.03637780
## 225   225 0.03211608 0.2111942 0.02461824 0.001275950 0.03639167
## 226   226 0.03211631 0.2111844 0.02461856 0.001275566 0.03635043
## 227   227 0.03211699 0.2111581 0.02461883 0.001275106 0.03636560
## 228   228 0.03211754 0.2111360 0.02461884 0.001275122 0.03634111
## 229   229 0.03211846 0.2110916 0.02461944 0.001274756 0.03633198
## 230   230 0.03211878 0.2110758 0.02461948 0.001274633 0.03632161
## 231   231 0.03211911 0.2110609 0.02461979 0.001274575 0.03631451
## 232   232 0.03211888 0.2110705 0.02461982 0.001275218 0.03631995
## 233   233 0.03211883 0.2110720 0.02461967 0.001275246 0.03630390
## 234   234 0.03211810 0.2111045 0.02461931 0.001275721 0.03632636
## 235   235 0.03211787 0.2111158 0.02461909 0.001276226 0.03634494
## 236   236 0.03211763 0.2111270 0.02461898 0.001276244 0.03635228
## 237   237 0.03211759 0.2111286 0.02461883 0.001276272 0.03634773
## 238   238 0.03211757 0.2111288 0.02461888 0.001276138 0.03634485
## 239   239 0.03211759 0.2111282 0.02461893 0.001276164 0.03634986
## 240   240 0.03211761 0.2111276 0.02461892 0.001276161 0.03635025
##            MAESD
## 1   0.0007738764
## 2   0.0006800497
## 3   0.0006076308
## 4   0.0006490686
## 5   0.0006124246
## 6   0.0005400859
## 7   0.0005590591
## 8   0.0005646895
## 9   0.0005887279
## 10  0.0006101115
## 11  0.0006157131
## 12  0.0006423545
## 13  0.0006439677
## 14  0.0006360397
## 15  0.0006247845
## 16  0.0006440486
## 17  0.0006438484
## 18  0.0006633257
## 19  0.0006770111
## 20  0.0006592707
## 21  0.0006563192
## 22  0.0006630873
## 23  0.0006535612
## 24  0.0006372314
## 25  0.0006296110
## 26  0.0006239411
## 27  0.0006164103
## 28  0.0006370485
## 29  0.0006419704
## 30  0.0006343674
## 31  0.0006341227
## 32  0.0006493666
## 33  0.0006528521
## 34  0.0006405422
## 35  0.0006315178
## 36  0.0006088484
## 37  0.0006107072
## 38  0.0006055290
## 39  0.0005983415
## 40  0.0005913850
## 41  0.0005952716
## 42  0.0005961943
## 43  0.0006151530
## 44  0.0006397665
## 45  0.0006294121
## 46  0.0006274186
## 47  0.0006229992
## 48  0.0006192333
## 49  0.0006191065
## 50  0.0006259115
## 51  0.0006278611
## 52  0.0006222911
## 53  0.0006220398
## 54  0.0006209174
## 55  0.0006218302
## 56  0.0006315953
## 57  0.0006452873
## 58  0.0006494409
## 59  0.0006497047
## 60  0.0006582173
## 61  0.0006534312
## 62  0.0006470949
## 63  0.0006403747
## 64  0.0006364351
## 65  0.0006407574
## 66  0.0006512194
## 67  0.0006419915
## 68  0.0006498277
## 69  0.0006571944
## 70  0.0006648502
## 71  0.0006697706
## 72  0.0006665231
## 73  0.0006600713
## 74  0.0006569241
## 75  0.0006622987
## 76  0.0006584978
## 77  0.0006566967
## 78  0.0006602584
## 79  0.0006580898
## 80  0.0006681495
## 81  0.0006746874
## 82  0.0006875623
## 83  0.0006892391
## 84  0.0006854035
## 85  0.0006796912
## 86  0.0006733391
## 87  0.0006753947
## 88  0.0006766116
## 89  0.0006771320
## 90  0.0006637300
## 91  0.0006579050
## 92  0.0006521060
## 93  0.0006491873
## 94  0.0006428057
## 95  0.0006353924
## 96  0.0006353728
## 97  0.0006373757
## 98  0.0006429032
## 99  0.0006424890
## 100 0.0006419009
## 101 0.0006475659
## 102 0.0006441683
## 103 0.0006367049
## 104 0.0006339285
## 105 0.0006300831
## 106 0.0006158191
## 107 0.0006089995
## 108 0.0006186232
## 109 0.0006179216
## 110 0.0006215524
## 111 0.0006309560
## 112 0.0006231008
## 113 0.0006264868
## 114 0.0006271743
## 115 0.0006300161
## 116 0.0006360653
## 117 0.0006366360
## 118 0.0006337669
## 119 0.0006262324
## 120 0.0006262675
## 121 0.0006269341
## 122 0.0006265481
## 123 0.0006233257
## 124 0.0006253932
## 125 0.0006245597
## 126 0.0006294384
## 127 0.0006368725
## 128 0.0006430382
## 129 0.0006397706
## 130 0.0006410945
## 131 0.0006402042
## 132 0.0006349733
## 133 0.0006290121
## 134 0.0006229248
## 135 0.0006224040
## 136 0.0006239470
## 137 0.0006163231
## 138 0.0006104025
## 139 0.0006128029
## 140 0.0006123699
## 141 0.0006171464
## 142 0.0006157932
## 143 0.0006187758
## 144 0.0006146636
## 145 0.0006185304
## 146 0.0006201095
## 147 0.0006246183
## 148 0.0006219033
## 149 0.0006246471
## 150 0.0006241457
## 151 0.0006259768
## 152 0.0006260711
## 153 0.0006242627
## 154 0.0006228299
## 155 0.0006224813
## 156 0.0006201554
## 157 0.0006285684
## 158 0.0006275751
## 159 0.0006289486
## 160 0.0006252708
## 161 0.0006285739
## 162 0.0006286201
## 163 0.0006277978
## 164 0.0006285813
## 165 0.0006227745
## 166 0.0006216241
## 167 0.0006221029
## 168 0.0006196923
## 169 0.0006172746
## 170 0.0006193770
## 171 0.0006210400
## 172 0.0006181907
## 173 0.0006188864
## 174 0.0006212996
## 175 0.0006212762
## 176 0.0006201701
## 177 0.0006223350
## 178 0.0006255825
## 179 0.0006238174
## 180 0.0006194299
## 181 0.0006170560
## 182 0.0006200659
## 183 0.0006230323
## 184 0.0006215474
## 185 0.0006196295
## 186 0.0006205600
## 187 0.0006171782
## 188 0.0006152187
## 189 0.0006161652
## 190 0.0006167571
## 191 0.0006139520
## 192 0.0006133504
## 193 0.0006118284
## 194 0.0006140730
## 195 0.0006144952
## 196 0.0006153009
## 197 0.0006153820
## 198 0.0006109803
## 199 0.0006106250
## 200 0.0006083634
## 201 0.0006088958
## 202 0.0006125211
## 203 0.0006121844
## 204 0.0006129577
## 205 0.0006134732
## 206 0.0006128079
## 207 0.0006140862
## 208 0.0006139382
## 209 0.0006135294
## 210 0.0006087699
## 211 0.0006092435
## 212 0.0006093801
## 213 0.0006084648
## 214 0.0006088704
## 215 0.0006098838
## 216 0.0006093406
## 217 0.0006088508
## 218 0.0006094981
## 219 0.0006087350
## 220 0.0006086121
## 221 0.0006100405
## 222 0.0006108499
## 223 0.0006102335
## 224 0.0006098857
## 225 0.0006105760
## 226 0.0006092498
## 227 0.0006080224
## 228 0.0006079035
## 229 0.0006076066
## 230 0.0006072950
## 231 0.0006073377
## 232 0.0006076399
## 233 0.0006076180
## 234 0.0006080520
## 235 0.0006083319
## 236 0.0006083134
## 237 0.0006082731
## 238 0.0006084270
## 239 0.0006084667
## 240 0.0006084272
## [1] "Best Model"
##   nvmax
## 8     8

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients of final model:"
##                  Estimate         2.5 %        97.5 %
## (Intercept)  2.000745e+00  1.9944123678  2.007078e+00
## x4          -5.339736e-05 -0.0000708296 -3.596511e-05
## x7           1.126502e-02  0.0100390043  1.249104e-02
## x9           3.419457e-03  0.0027827248  4.056190e-03
## x16          8.893311e-04  0.0004770930  1.301569e-03
## x17          1.569187e-03  0.0009371998  2.201173e-03
## stat98       3.679003e-03  0.0032090828  4.148922e-03
## stat110     -3.396195e-03 -0.0038709613 -2.921428e-03
## sqrt.x18     2.634127e-02  0.0245190385  2.816351e-02

Test

# Evaluate the CV-selected forward model on the held-out test set, drawing
# prediction limits.
# NOTE(review): `transformation = t` passes base::t (matrix transpose) unless a
# variable `t` was assigned earlier in the document — confirm this is intended
# (the transformation flag elsewhere is `params$trans`).
if (algo.forward.caret == TRUE){
    test.model(model.forward, data.test
             ,method = 'leapForward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.042   2.086   2.098   2.098   2.111   2.145 
## [1] "leapForward  Test MSE: 0.00104706068550194"

Forward Selection with CV (w/ filtered train)

Train

# Cross-validated forward selection repeated on the filtered training set.
# NOTE(review): this overwrites `model.forward`/`id` from the full-train run;
# the non-caret version kept a separate `model.forward2` — confirm intended.
if (algo.forward.caret == TRUE){
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "leapForward",
    feature.names = feature.names
  )
  model.forward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
## [1] "All models results"
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.02870415 0.1530762 0.02317988 0.0006951382 0.01549603
## 2       2 0.02757227 0.2187844 0.02242433 0.0007527183 0.01820421
## 3       3 0.02705829 0.2479533 0.02192760 0.0008285373 0.02374708
## 4       4 0.02625232 0.2917906 0.02107256 0.0007462016 0.02401253
## 5       5 0.02588014 0.3117589 0.02079034 0.0008063445 0.03139083
## 6       6 0.02571972 0.3202667 0.02066781 0.0008228725 0.03226738
## 7       7 0.02565726 0.3235663 0.02065431 0.0007901694 0.03048056
## 8       8 0.02555620 0.3288700 0.02061242 0.0008109593 0.03123911
## 9       9 0.02555436 0.3290776 0.02063900 0.0008119461 0.03171220
## 10     10 0.02551324 0.3312508 0.02062474 0.0008234722 0.03246361
## 11     11 0.02539411 0.3374719 0.02053013 0.0008413873 0.03325789
## 12     12 0.02539798 0.3373039 0.02050649 0.0008320889 0.03222254
## 13     13 0.02538292 0.3380596 0.02049708 0.0008467126 0.03340128
## 14     14 0.02542575 0.3358779 0.02052054 0.0008402712 0.03180199
## 15     15 0.02543250 0.3355319 0.02052936 0.0008037380 0.02999873
## 16     16 0.02545470 0.3343357 0.02056416 0.0007952837 0.03013225
## 17     17 0.02545915 0.3341483 0.02057720 0.0007994345 0.03044657
## 18     18 0.02545586 0.3342969 0.02058515 0.0007900351 0.02959013
## 19     19 0.02545644 0.3342769 0.02058191 0.0008130058 0.03064783
## 20     20 0.02546603 0.3338423 0.02058953 0.0008001520 0.02966257
## 21     21 0.02548570 0.3328702 0.02060157 0.0007898450 0.02854075
## 22     22 0.02550989 0.3316802 0.02062654 0.0007887644 0.02787369
## 23     23 0.02551426 0.3314941 0.02062817 0.0007884162 0.02812047
## 24     24 0.02554525 0.3299931 0.02064889 0.0007714648 0.02702875
## 25     25 0.02553390 0.3306420 0.02063629 0.0007627957 0.02656068
## 26     26 0.02553609 0.3305133 0.02063175 0.0007539166 0.02613752
## 27     27 0.02555146 0.3297774 0.02064753 0.0007502737 0.02610345
## 28     28 0.02555216 0.3297695 0.02064357 0.0007644620 0.02598911
## 29     29 0.02556904 0.3288954 0.02066513 0.0007616115 0.02493889
## 30     30 0.02557216 0.3287806 0.02066440 0.0007405399 0.02369441
## 31     31 0.02557860 0.3284610 0.02066563 0.0007433400 0.02447159
## 32     32 0.02557334 0.3287990 0.02066283 0.0007537139 0.02502705
## 33     33 0.02557399 0.3288792 0.02066257 0.0007443077 0.02466191
## 34     34 0.02556635 0.3293076 0.02065355 0.0007284537 0.02424829
## 35     35 0.02558213 0.3285829 0.02066454 0.0007236990 0.02352952
## 36     36 0.02557310 0.3290209 0.02065348 0.0007172688 0.02384501
## 37     37 0.02556768 0.3293891 0.02064730 0.0007152342 0.02383893
## 38     38 0.02555150 0.3302178 0.02062843 0.0007161157 0.02384966
## 39     39 0.02554104 0.3307695 0.02062328 0.0007138616 0.02446382
## 40     40 0.02552414 0.3316659 0.02060322 0.0007083851 0.02442114
## 41     41 0.02552736 0.3315637 0.02060345 0.0007190022 0.02487464
## 42     42 0.02550932 0.3324852 0.02057997 0.0007194547 0.02518593
## 43     43 0.02551538 0.3322004 0.02058599 0.0007176034 0.02468445
## 44     44 0.02551022 0.3324693 0.02058178 0.0007323204 0.02503804
## 45     45 0.02549902 0.3330720 0.02057630 0.0007404901 0.02591916
## 46     46 0.02547623 0.3342670 0.02056901 0.0007378437 0.02527144
## 47     47 0.02546389 0.3349215 0.02056380 0.0007421947 0.02563933
## 48     48 0.02545394 0.3354060 0.02055621 0.0007520928 0.02537946
## 49     49 0.02546291 0.3349875 0.02056058 0.0007529925 0.02571544
## 50     50 0.02546055 0.3350975 0.02055849 0.0007421532 0.02542074
## 51     51 0.02546627 0.3348229 0.02056653 0.0007399208 0.02494710
## 52     52 0.02547360 0.3345113 0.02057981 0.0007403800 0.02518017
## 53     53 0.02549058 0.3336711 0.02059213 0.0007458618 0.02497991
## 54     54 0.02549947 0.3332893 0.02060069 0.0007419077 0.02482123
## 55     55 0.02548467 0.3340274 0.02058485 0.0007420576 0.02459514
## 56     56 0.02549163 0.3336948 0.02059423 0.0007359244 0.02426832
## 57     57 0.02548874 0.3338475 0.02059009 0.0007298679 0.02397800
## 58     58 0.02549631 0.3334922 0.02058337 0.0007163712 0.02367990
## 59     59 0.02548334 0.3341513 0.02057264 0.0007089747 0.02384783
## 60     60 0.02547014 0.3347913 0.02056442 0.0007158416 0.02344431
## 61     61 0.02548760 0.3339832 0.02057206 0.0007219270 0.02389614
## 62     62 0.02548615 0.3340830 0.02056974 0.0007267250 0.02460010
## 63     63 0.02548196 0.3343028 0.02056785 0.0007326737 0.02489218
## 64     64 0.02548362 0.3342654 0.02056840 0.0007404024 0.02538997
## 65     65 0.02548796 0.3340661 0.02057197 0.0007320425 0.02556539
## 66     66 0.02549055 0.3339478 0.02056380 0.0007180305 0.02540145
## 67     67 0.02548889 0.3340661 0.02055727 0.0007031121 0.02525157
## 68     68 0.02548381 0.3343165 0.02055189 0.0007003180 0.02481637
## 69     69 0.02548265 0.3343655 0.02054994 0.0006961090 0.02485553
## 70     70 0.02547952 0.3344967 0.02054633 0.0007017623 0.02517282
## 71     71 0.02547715 0.3346235 0.02054134 0.0007019233 0.02514776
## 72     72 0.02548670 0.3341986 0.02054856 0.0007030866 0.02499515
## 73     73 0.02549553 0.3337916 0.02055195 0.0007086711 0.02492431
## 74     74 0.02549253 0.3339825 0.02055267 0.0006971552 0.02450446
## 75     75 0.02550027 0.3336201 0.02055969 0.0007007075 0.02466488
## 76     76 0.02550194 0.3335320 0.02056317 0.0007015282 0.02442938
## 77     77 0.02549495 0.3339063 0.02055862 0.0007036901 0.02475500
## 78     78 0.02551346 0.3329798 0.02057049 0.0007027712 0.02476793
## 79     79 0.02551300 0.3330108 0.02057208 0.0006952941 0.02438321
## 80     80 0.02551150 0.3330494 0.02057389 0.0006916693 0.02481107
## 81     81 0.02550486 0.3333633 0.02056426 0.0006859699 0.02484652
## 82     82 0.02550669 0.3332420 0.02056794 0.0006768475 0.02442057
## 83     83 0.02551064 0.3330853 0.02057374 0.0006757425 0.02417426
## 84     84 0.02550845 0.3332328 0.02057408 0.0006741514 0.02372026
## 85     85 0.02551347 0.3329815 0.02057326 0.0006721947 0.02368715
## 86     86 0.02551370 0.3329829 0.02057639 0.0006872598 0.02421375
## 87     87 0.02550700 0.3333293 0.02057026 0.0006821979 0.02393695
## 88     88 0.02551033 0.3332071 0.02057406 0.0006801703 0.02360374
## 89     89 0.02552297 0.3326175 0.02058297 0.0006779205 0.02359942
## 90     90 0.02552864 0.3323823 0.02058890 0.0006730118 0.02342659
## 91     91 0.02552499 0.3325669 0.02059289 0.0006763292 0.02336460
## 92     92 0.02552437 0.3326469 0.02058596 0.0006768919 0.02329860
## 93     93 0.02553308 0.3322175 0.02059536 0.0006856939 0.02343825
## 94     94 0.02553196 0.3323025 0.02059021 0.0006945193 0.02371003
## 95     95 0.02553030 0.3323959 0.02059258 0.0006894819 0.02353806
## 96     96 0.02553928 0.3319674 0.02059324 0.0006880413 0.02341470
## 97     97 0.02553288 0.3322736 0.02058588 0.0006882777 0.02318300
## 98     98 0.02553111 0.3323467 0.02059005 0.0006915460 0.02359444
## 99     99 0.02553458 0.3321520 0.02058901 0.0006829839 0.02340422
## 100   100 0.02554241 0.3317836 0.02059864 0.0006883978 0.02375532
## 101   101 0.02553658 0.3320953 0.02058935 0.0006845692 0.02417144
## 102   102 0.02554921 0.3314940 0.02060166 0.0006899950 0.02441579
## 103   103 0.02554800 0.3315612 0.02060419 0.0006977685 0.02506248
## 104   104 0.02555220 0.3313797 0.02060543 0.0007047123 0.02521298
## 105   105 0.02555042 0.3315156 0.02060560 0.0007076228 0.02521565
## 106   106 0.02554764 0.3316904 0.02060736 0.0006992533 0.02493576
## 107   107 0.02555438 0.3313564 0.02061024 0.0006982822 0.02491057
## 108   108 0.02555462 0.3313432 0.02060541 0.0006972677 0.02489787
## 109   109 0.02554779 0.3316581 0.02060056 0.0006853268 0.02447553
## 110   110 0.02555077 0.3315358 0.02060348 0.0006833980 0.02453080
## 111   111 0.02555122 0.3315301 0.02060502 0.0006839686 0.02430633
## 112   112 0.02556246 0.3309766 0.02061720 0.0006841945 0.02417332
## 113   113 0.02555725 0.3312407 0.02061570 0.0006824270 0.02411596
## 114   114 0.02555676 0.3312944 0.02061341 0.0006800552 0.02407080
## 115   115 0.02555941 0.3311782 0.02061332 0.0006772952 0.02413503
## 116   116 0.02555910 0.3312156 0.02061035 0.0006807832 0.02419000
## 117   117 0.02555215 0.3315429 0.02060386 0.0006796284 0.02387054
## 118   118 0.02554552 0.3318791 0.02060085 0.0006833774 0.02391307
## 119   119 0.02554001 0.3321601 0.02059370 0.0006808080 0.02397806
## 120   120 0.02554679 0.3318189 0.02060295 0.0006806658 0.02406233
## 121   121 0.02554829 0.3317452 0.02060649 0.0006852782 0.02422378
## 122   122 0.02555404 0.3314765 0.02061085 0.0006885695 0.02427861
## 123   123 0.02554941 0.3317121 0.02060975 0.0006902600 0.02446494
## 124   124 0.02554776 0.3317953 0.02060855 0.0006932952 0.02465624
## 125   125 0.02555670 0.3313655 0.02061481 0.0006900470 0.02472006
## 126   126 0.02555569 0.3314320 0.02061214 0.0006888639 0.02487163
## 127   127 0.02556048 0.3312162 0.02061531 0.0006921134 0.02484970
## 128   128 0.02556544 0.3309864 0.02062250 0.0006955955 0.02494576
## 129   129 0.02556603 0.3309675 0.02062404 0.0006875968 0.02466019
## 130   130 0.02556693 0.3309199 0.02062015 0.0006881617 0.02447538
## 131   131 0.02556633 0.3309236 0.02062007 0.0006831475 0.02416520
## 132   132 0.02556127 0.3311583 0.02061862 0.0006756969 0.02416863
## 133   133 0.02556735 0.3308370 0.02062511 0.0006700793 0.02430860
## 134   134 0.02556522 0.3309494 0.02062404 0.0006676614 0.02418432
## 135   135 0.02556092 0.3311543 0.02062735 0.0006670432 0.02423339
## 136   136 0.02556157 0.3311705 0.02062653 0.0006728766 0.02448946
## 137   137 0.02556880 0.3308428 0.02063334 0.0006728805 0.02456668
## 138   138 0.02556653 0.3309843 0.02062846 0.0006661085 0.02420143
## 139   139 0.02557107 0.3307565 0.02063462 0.0006614256 0.02431198
## 140   140 0.02556988 0.3308132 0.02063524 0.0006608906 0.02419441
## 141   141 0.02557234 0.3307136 0.02064102 0.0006579103 0.02436086
## 142   142 0.02556806 0.3309316 0.02063953 0.0006630722 0.02450381
## 143   143 0.02556991 0.3308721 0.02064035 0.0006600494 0.02429169
## 144   144 0.02557245 0.3307569 0.02064264 0.0006658694 0.02464623
## 145   145 0.02557904 0.3304680 0.02064416 0.0006678001 0.02491968
## 146   146 0.02557598 0.3306092 0.02064130 0.0006712616 0.02520712
## 147   147 0.02557749 0.3305268 0.02064082 0.0006732440 0.02541946
## 148   148 0.02558088 0.3303684 0.02064551 0.0006752409 0.02525791
## 149   149 0.02557610 0.3306210 0.02064212 0.0006785940 0.02545993
## 150   150 0.02557486 0.3306823 0.02064126 0.0006784955 0.02550068
## 151   151 0.02557502 0.3306704 0.02063998 0.0006795723 0.02586002
## 152   152 0.02557766 0.3305669 0.02064536 0.0006805773 0.02573846
## 153   153 0.02557532 0.3306542 0.02064330 0.0006740259 0.02579505
## 154   154 0.02557613 0.3305914 0.02064517 0.0006731545 0.02611798
## 155   155 0.02557642 0.3305812 0.02064385 0.0006713560 0.02635893
## 156   156 0.02557270 0.3307583 0.02064420 0.0006726121 0.02626946
## 157   157 0.02557557 0.3306263 0.02064487 0.0006716822 0.02614517
## 158   158 0.02557253 0.3307645 0.02064202 0.0006684762 0.02580223
## 159   159 0.02557655 0.3305683 0.02064275 0.0006677687 0.02590452
## 160   160 0.02558024 0.3303993 0.02064622 0.0006739646 0.02598321
## 161   161 0.02558259 0.3303007 0.02064813 0.0006737376 0.02586956
## 162   162 0.02558713 0.3300745 0.02065040 0.0006749786 0.02597946
## 163   163 0.02558423 0.3302197 0.02064730 0.0006759271 0.02605978
## 164   164 0.02558555 0.3301415 0.02065084 0.0006785328 0.02611173
## 165   165 0.02558623 0.3301148 0.02065025 0.0006824973 0.02623894
## 166   166 0.02558570 0.3301462 0.02064794 0.0006795491 0.02615044
## 167   167 0.02558423 0.3302156 0.02064547 0.0006796077 0.02616174
## 168   168 0.02558901 0.3299812 0.02064975 0.0006747699 0.02577936
## 169   169 0.02559353 0.3297610 0.02065266 0.0006750826 0.02570423
## 170   170 0.02559265 0.3298162 0.02065180 0.0006742923 0.02572533
## 171   171 0.02559286 0.3298154 0.02065263 0.0006734194 0.02575912
## 172   172 0.02559352 0.3298050 0.02065434 0.0006696192 0.02557348
## 173   173 0.02559547 0.3297023 0.02065706 0.0006677901 0.02543725
## 174   174 0.02559201 0.3298624 0.02065369 0.0006679845 0.02537455
## 175   175 0.02559769 0.3295947 0.02065709 0.0006663317 0.02545652
## 176   176 0.02559572 0.3297032 0.02065426 0.0006652315 0.02531387
## 177   177 0.02559754 0.3296162 0.02065456 0.0006678921 0.02523403
## 178   178 0.02559670 0.3296583 0.02065446 0.0006680898 0.02522665
## 179   179 0.02559609 0.3296915 0.02065310 0.0006675795 0.02522787
## 180   180 0.02559307 0.3298393 0.02065018 0.0006691215 0.02529540
## 181   181 0.02559452 0.3297777 0.02065307 0.0006687818 0.02522923
## 182   182 0.02559560 0.3297217 0.02065478 0.0006675394 0.02532870
## 183   183 0.02559914 0.3295580 0.02065947 0.0006680326 0.02519506
## 184   184 0.02559950 0.3295401 0.02065957 0.0006672557 0.02515230
## 185   185 0.02560136 0.3294595 0.02066176 0.0006692109 0.02513318
## 186   186 0.02560027 0.3295211 0.02066128 0.0006706041 0.02525062
## 187   187 0.02559986 0.3295380 0.02066040 0.0006707763 0.02527610
## 188   188 0.02559894 0.3295796 0.02066042 0.0006719924 0.02529631
## 189   189 0.02559966 0.3295665 0.02065945 0.0006732950 0.02528338
## 190   190 0.02560143 0.3294899 0.02065925 0.0006722659 0.02533339
## 191   191 0.02560073 0.3295288 0.02065778 0.0006736604 0.02527308
## 192   192 0.02560057 0.3295349 0.02065801 0.0006696745 0.02513885
## 193   193 0.02560213 0.3294609 0.02065911 0.0006682044 0.02495100
## 194   194 0.02560591 0.3292802 0.02066232 0.0006685503 0.02491901
## 195   195 0.02560670 0.3292523 0.02066373 0.0006687499 0.02492971
## 196   196 0.02560594 0.3292814 0.02066263 0.0006690241 0.02501148
## 197   197 0.02560534 0.3293089 0.02066017 0.0006660719 0.02490441
## 198   198 0.02560665 0.3292547 0.02066059 0.0006660776 0.02484948
## 199   199 0.02560815 0.3291809 0.02066143 0.0006643371 0.02479394
## 200   200 0.02560752 0.3292040 0.02066108 0.0006652676 0.02477959
## 201   201 0.02560889 0.3291342 0.02066335 0.0006641372 0.02471650
## 202   202 0.02560960 0.3290983 0.02066353 0.0006666019 0.02474873
## 203   203 0.02560905 0.3291285 0.02066286 0.0006667326 0.02477626
## 204   204 0.02560976 0.3290869 0.02066295 0.0006670982 0.02472830
## 205   205 0.02560947 0.3291065 0.02066302 0.0006681416 0.02472124
## 206   206 0.02560928 0.3291116 0.02066254 0.0006676121 0.02472071
## 207   207 0.02561128 0.3290079 0.02066437 0.0006676623 0.02469032
## 208   208 0.02561245 0.3289445 0.02066498 0.0006680453 0.02469031
## 209   209 0.02561307 0.3289177 0.02066532 0.0006682955 0.02469280
## 210   210 0.02561389 0.3288790 0.02066688 0.0006672408 0.02464943
## 211   211 0.02561349 0.3288981 0.02066645 0.0006655666 0.02461794
## 212   212 0.02561339 0.3289000 0.02066663 0.0006646152 0.02452479
## 213   213 0.02561523 0.3288106 0.02066830 0.0006642556 0.02446183
## 214   214 0.02561718 0.3287125 0.02066887 0.0006642514 0.02444275
## 215   215 0.02561783 0.3286816 0.02066983 0.0006645588 0.02446005
## 216   216 0.02561690 0.3287269 0.02066935 0.0006648116 0.02448265
## 217   217 0.02561702 0.3287275 0.02066893 0.0006635472 0.02435807
## 218   218 0.02561640 0.3287562 0.02066916 0.0006623748 0.02436566
## 219   219 0.02561607 0.3287702 0.02066921 0.0006624976 0.02434056
## 220   220 0.02561555 0.3287945 0.02066822 0.0006620137 0.02433293
## 221   221 0.02561466 0.3288351 0.02066733 0.0006614175 0.02432382
## 222   222 0.02561521 0.3288052 0.02066723 0.0006613649 0.02432979
## 223   223 0.02561528 0.3288007 0.02066733 0.0006616678 0.02435067
## 224   224 0.02561585 0.3287736 0.02066770 0.0006622896 0.02439444
## 225   225 0.02561581 0.3287775 0.02066781 0.0006622574 0.02439560
## 226   226 0.02561603 0.3287715 0.02066810 0.0006624284 0.02434477
## 227   227 0.02561604 0.3287721 0.02066875 0.0006621836 0.02437919
## 228   228 0.02561681 0.3287341 0.02066977 0.0006619494 0.02434201
## 229   229 0.02561652 0.3287484 0.02066973 0.0006623945 0.02434896
## 230   230 0.02561622 0.3287638 0.02066972 0.0006627403 0.02440178
## 231   231 0.02561627 0.3287603 0.02066974 0.0006625980 0.02440926
## 232   232 0.02561624 0.3287624 0.02066962 0.0006630257 0.02442001
## 233   233 0.02561611 0.3287709 0.02066968 0.0006630633 0.02442092
## 234   234 0.02561591 0.3287804 0.02066947 0.0006631370 0.02442594
## 235   235 0.02561573 0.3287874 0.02066952 0.0006631023 0.02442544
## 236   236 0.02561569 0.3287896 0.02066970 0.0006630408 0.02442450
## 237   237 0.02561555 0.3287963 0.02066948 0.0006630341 0.02442236
## 238   238 0.02561561 0.3287938 0.02066947 0.0006630073 0.02441925
## 239   239 0.02561556 0.3287961 0.02066950 0.0006629952 0.02442089
## 240   240 0.02561559 0.3287947 0.02066959 0.0006629916 0.02442307
##            MAESD
## 1   0.0004505032
## 2   0.0005029019
## 3   0.0005406257
## 4   0.0005359142
## 5   0.0005696292
## 6   0.0006007779
## 7   0.0005954885
## 8   0.0006210426
## 9   0.0006184094
## 10  0.0006296173
## 11  0.0006390197
## 12  0.0006210189
## 13  0.0006286162
## 14  0.0006194143
## 15  0.0005865930
## 16  0.0005951725
## 17  0.0005875157
## 18  0.0005819540
## 19  0.0005948381
## 20  0.0005966689
## 21  0.0005883145
## 22  0.0005713903
## 23  0.0005601492
## 24  0.0005345775
## 25  0.0005171684
## 26  0.0005034275
## 27  0.0004915559
## 28  0.0004997469
## 29  0.0004964962
## 30  0.0004851177
## 31  0.0004850161
## 32  0.0004918536
## 33  0.0004866883
## 34  0.0004878372
## 35  0.0005005615
## 36  0.0004863947
## 37  0.0004937467
## 38  0.0004924037
## 39  0.0004875577
## 40  0.0004924529
## 41  0.0005050971
## 42  0.0005350995
## 43  0.0005287489
## 44  0.0005425234
## 45  0.0005419306
## 46  0.0005388297
## 47  0.0005407446
## 48  0.0005519897
## 49  0.0005516333
## 50  0.0005450978
## 51  0.0005499877
## 52  0.0005546185
## 53  0.0005466819
## 54  0.0005365651
## 55  0.0005337098
## 56  0.0005151344
## 57  0.0005093995
## 58  0.0005022460
## 59  0.0005005618
## 60  0.0005035513
## 61  0.0004988769
## 62  0.0005097558
## 63  0.0005172637
## 64  0.0005304226
## 65  0.0005289789
## 66  0.0005218646
## 67  0.0005134459
## 68  0.0005165655
## 69  0.0005161667
## 70  0.0005280959
## 71  0.0005259106
## 72  0.0005289108
## 73  0.0005323738
## 74  0.0005195230
## 75  0.0005235264
## 76  0.0005171331
## 77  0.0005185175
## 78  0.0005144317
## 79  0.0005091254
## 80  0.0005147930
## 81  0.0005158775
## 82  0.0005057485
## 83  0.0005075063
## 84  0.0005028718
## 85  0.0005022967
## 86  0.0005123234
## 87  0.0005099641
## 88  0.0005071618
## 89  0.0005116463
## 90  0.0005046660
## 91  0.0004989442
## 92  0.0004960505
## 93  0.0005067611
## 94  0.0005118142
## 95  0.0005057124
## 96  0.0005012112
## 97  0.0004997747
## 98  0.0004980587
## 99  0.0004895585
## 100 0.0004937822
## 101 0.0004936943
## 102 0.0004973038
## 103 0.0005044913
## 104 0.0005121187
## 105 0.0005141018
## 106 0.0005038869
## 107 0.0005076542
## 108 0.0005032352
## 109 0.0004958014
## 110 0.0004982228
## 111 0.0004974911
## 112 0.0004911382
## 113 0.0004893576
## 114 0.0004866591
## 115 0.0004838143
## 116 0.0004819098
## 117 0.0004832582
## 118 0.0004861818
## 119 0.0004832943
## 120 0.0004895077
## 121 0.0004903186
## 122 0.0004974027
## 123 0.0004995015
## 124 0.0005038716
## 125 0.0004988354
## 126 0.0005005925
## 127 0.0005020962
## 128 0.0004991045
## 129 0.0004900735
## 130 0.0004886489
## 131 0.0004837946
## 132 0.0004744229
## 133 0.0004741488
## 134 0.0004709849
## 135 0.0004694026
## 136 0.0004730048
## 137 0.0004716582
## 138 0.0004668423
## 139 0.0004689808
## 140 0.0004634152
## 141 0.0004602818
## 142 0.0004604086
## 143 0.0004612256
## 144 0.0004671063
## 145 0.0004666957
## 146 0.0004702000
## 147 0.0004722906
## 148 0.0004731621
## 149 0.0004754119
## 150 0.0004797752
## 151 0.0004836589
## 152 0.0004838856
## 153 0.0004803123
## 154 0.0004782844
## 155 0.0004807767
## 156 0.0004822658
## 157 0.0004793580
## 158 0.0004763456
## 159 0.0004763141
## 160 0.0004809352
## 161 0.0004801268
## 162 0.0004834504
## 163 0.0004826706
## 164 0.0004857755
## 165 0.0004873243
## 166 0.0004831841
## 167 0.0004822743
## 168 0.0004803447
## 169 0.0004816812
## 170 0.0004801342
## 171 0.0004817021
## 172 0.0004778678
## 173 0.0004719817
## 174 0.0004693531
## 175 0.0004699016
## 176 0.0004693610
## 177 0.0004684007
## 178 0.0004675453
## 179 0.0004681852
## 180 0.0004680252
## 181 0.0004672329
## 182 0.0004658797
## 183 0.0004619920
## 184 0.0004615559
## 185 0.0004607057
## 186 0.0004634363
## 187 0.0004658817
## 188 0.0004668368
## 189 0.0004653481
## 190 0.0004660821
## 191 0.0004654511
## 192 0.0004611516
## 193 0.0004587660
## 194 0.0004599235
## 195 0.0004600094
## 196 0.0004608436
## 197 0.0004582339
## 198 0.0004582379
## 199 0.0004578374
## 200 0.0004588318
## 201 0.0004584123
## 202 0.0004602114
## 203 0.0004613137
## 204 0.0004616530
## 205 0.0004614921
## 206 0.0004617412
## 207 0.0004620865
## 208 0.0004616319
## 209 0.0004624435
## 210 0.0004612288
## 211 0.0004596783
## 212 0.0004596942
## 213 0.0004581905
## 214 0.0004584029
## 215 0.0004590452
## 216 0.0004593533
## 217 0.0004579028
## 218 0.0004569131
## 219 0.0004570274
## 220 0.0004572813
## 221 0.0004566115
## 222 0.0004572682
## 223 0.0004576748
## 224 0.0004583151
## 225 0.0004584123
## 226 0.0004584486
## 227 0.0004586281
## 228 0.0004581037
## 229 0.0004587647
## 230 0.0004589868
## 231 0.0004589746
## 232 0.0004591305
## 233 0.0004590681
## 234 0.0004590140
## 235 0.0004591266
## 236 0.0004590646
## 237 0.0004590514
## 238 0.0004589379
## 239 0.0004589351
## 240 0.0004589293
## [1] "Best Model"
##    nvmax
## 13    13

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients of final model:"
##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.980913e+00  1.974679e+00  1.987147e+00
## x4          -6.190952e-05 -7.633876e-05 -4.748029e-05
## x7           1.210509e-02  1.109182e-02  1.311836e-02
## x8           6.003970e-04  3.642962e-04  8.364977e-04
## x9           3.421865e-03  2.896815e-03  3.946916e-03
## x10          1.347223e-03  8.549445e-04  1.839501e-03
## x16          9.846174e-04  6.444391e-04  1.324796e-03
## x17          1.677457e-03  1.155602e-03  2.199311e-03
## x21          1.283488e-04  6.119492e-05  1.955026e-04
## stat4       -6.932958e-04 -1.090276e-03 -2.963155e-04
## stat14      -9.663019e-04 -1.358696e-03 -5.739075e-04
## stat98       3.418433e-03  3.030176e-03  3.806690e-03
## stat110     -3.356195e-03 -3.747807e-03 -2.964582e-03
## sqrt.x18     2.652400e-02  2.502231e-02  2.802568e-02

Test

# Evaluate the caret-trained forward-selection (leapForward) model on the
# held-out test set, drawing prediction limits and undoing transformation `t`.
# isTRUE() is safer than `== TRUE`: it returns FALSE (not NA) if the flag
# is NA, so the `if` cannot error out on a missing parameter.
if (isTRUE(algo.forward.caret)){
  test.model(model.forward, data.test
             ,method = 'leapForward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.033   2.082   2.094   2.095   2.108   2.145 
## [1] "leapForward  Test MSE: 0.00104648971088915"

Backward Elimination

Train

# Backward elimination: start from the full linear model and drop terms by
# AIC using step(). Can be very slow on wide data, hence the timing wrapper.
# isTRUE() avoids an error in `if` when the flag is NA.
if (isTRUE(algo.backward)){
  # Takes too much time
  t1 = Sys.time()

  model.backward = step(model.full, data = data.train, direction="backward", trace = 0)
  print(summary(model.backward))

  t2 = Sys.time()
  # format() preserves the difftime units (secs/mins/hours) in the message;
  # pasting the raw difference prints a bare number with no unit, which is
  # ambiguous when the run crosses the secs/mins auto-unit boundary.
  print (paste("Time taken for Backward Elimination: ", format(t2 - t1), sep = ""))

  # Residual / leverage diagnostic plots for the selected model.
  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the held-out test set.
# Fix: the original referenced `model.backard` (typo), which would throw
# "object 'model.backard' not found" every time this branch actually ran.
if (isTRUE(algo.backward)){
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Backward elimination with cross-validation via caret (leapBackward),
# fit on the full training set. The seed is fixed so the CV fold
# assignment — and therefore the selected nvmax — is reproducible.
# isTRUE() avoids an error in `if` when the flag is NA.
if (isTRUE(algo.backward.caret)){
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data = data.train
                                   ,method = "leapBackward"
                                   ,feature.names = feature.names)
  # Unpack the trained model and the identifier used by test.model() later.
  model.backward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 8 on full training set
## [1] "All models results"
##     nvmax       RMSE  Rsquared        MAE      RMSESD RsquaredSD
## 1       1 0.03397144 0.1129785 0.02646469 0.001366504 0.03258230
## 2       2 0.03314757 0.1546431 0.02580411 0.001274010 0.03550349
## 3       3 0.03253002 0.1853842 0.02520471 0.001137243 0.03345947
## 4       4 0.03198437 0.2123327 0.02446596 0.001217541 0.03861630
## 5       5 0.03168250 0.2267382 0.02423192 0.001186098 0.03411053
## 6       6 0.03158563 0.2313478 0.02414987 0.001141436 0.03245483
## 7       7 0.03151956 0.2344940 0.02412544 0.001171232 0.03109860
## 8       8 0.03147259 0.2368539 0.02410171 0.001168497 0.03058939
## 9       9 0.03150072 0.2354524 0.02413376 0.001181995 0.03129068
## 10     10 0.03148114 0.2363709 0.02412322 0.001194718 0.03116077
## 11     11 0.03149740 0.2356756 0.02413578 0.001213103 0.03187730
## 12     12 0.03148258 0.2364391 0.02413605 0.001244124 0.03330571
## 13     13 0.03150559 0.2353915 0.02416300 0.001253546 0.03181309
## 14     14 0.03152527 0.2344496 0.02418075 0.001240694 0.03226716
## 15     15 0.03157088 0.2322994 0.02420247 0.001241489 0.03132019
## 16     16 0.03160145 0.2308958 0.02423478 0.001248835 0.03163953
## 17     17 0.03160186 0.2309332 0.02424001 0.001246363 0.03189616
## 18     18 0.03159431 0.2313285 0.02424619 0.001253883 0.03241756
## 19     19 0.03162206 0.2300399 0.02427201 0.001267862 0.03302363
## 20     20 0.03162195 0.2300332 0.02427172 0.001257815 0.03327049
## 21     21 0.03164067 0.2292573 0.02428008 0.001267160 0.03433294
## 22     22 0.03165708 0.2285536 0.02429242 0.001264948 0.03487318
## 23     23 0.03168132 0.2274817 0.02431134 0.001260436 0.03486253
## 24     24 0.03167985 0.2276079 0.02431258 0.001272000 0.03503268
## 25     25 0.03166867 0.2282045 0.02430895 0.001280642 0.03546477
## 26     26 0.03168709 0.2273931 0.02432236 0.001281512 0.03535174
## 27     27 0.03169683 0.2269575 0.02433137 0.001279594 0.03524543
## 28     28 0.03172408 0.2257405 0.02435030 0.001261893 0.03543042
## 29     29 0.03172970 0.2255186 0.02436025 0.001273395 0.03572900
## 30     30 0.03172736 0.2256450 0.02435862 0.001269556 0.03539230
## 31     31 0.03172696 0.2257231 0.02435797 0.001262536 0.03505364
## 32     32 0.03174912 0.2247874 0.02436988 0.001271978 0.03446602
## 33     33 0.03174871 0.2248453 0.02435823 0.001270622 0.03416520
## 34     34 0.03174427 0.2250908 0.02435229 0.001268357 0.03440786
## 35     35 0.03172596 0.2259307 0.02432998 0.001263488 0.03398111
## 36     36 0.03172940 0.2257477 0.02433167 0.001226416 0.03309880
## 37     37 0.03174709 0.2249864 0.02435194 0.001230528 0.03281577
## 38     38 0.03176720 0.2241253 0.02437346 0.001225846 0.03297961
## 39     39 0.03178042 0.2235624 0.02437797 0.001218221 0.03293098
## 40     40 0.03177153 0.2240492 0.02436439 0.001226452 0.03283667
## 41     41 0.03176447 0.2243774 0.02435765 0.001213446 0.03290903
## 42     42 0.03175812 0.2247270 0.02434583 0.001208445 0.03274952
## 43     43 0.03176721 0.2243542 0.02435850 0.001209415 0.03258155
## 44     44 0.03175375 0.2249787 0.02435262 0.001207447 0.03276084
## 45     45 0.03175415 0.2249460 0.02435717 0.001196491 0.03212525
## 46     46 0.03175495 0.2249432 0.02436022 0.001190355 0.03196173
## 47     47 0.03175042 0.2251878 0.02435167 0.001196253 0.03258849
## 48     48 0.03174830 0.2252265 0.02434710 0.001170191 0.03159486
## 49     49 0.03173495 0.2259030 0.02434433 0.001165842 0.03181480
## 50     50 0.03172476 0.2263686 0.02433380 0.001178215 0.03178378
## 51     51 0.03174309 0.2256113 0.02434827 0.001193451 0.03232985
## 52     52 0.03174110 0.2257333 0.02434498 0.001196273 0.03225977
## 53     53 0.03173627 0.2259345 0.02433656 0.001205926 0.03265282
## 54     54 0.03173532 0.2260057 0.02434353 0.001209901 0.03250082
## 55     55 0.03173775 0.2259239 0.02435150 0.001219010 0.03257121
## 56     56 0.03174831 0.2254705 0.02435392 0.001216233 0.03274294
## 57     57 0.03175948 0.2250533 0.02437152 0.001227339 0.03254865
## 58     58 0.03177549 0.2243788 0.02438278 0.001244809 0.03344693
## 59     59 0.03178594 0.2239371 0.02437674 0.001241559 0.03341879
## 60     60 0.03178613 0.2239809 0.02437316 0.001254064 0.03383253
## 61     61 0.03179220 0.2237348 0.02437530 0.001255393 0.03421472
## 62     62 0.03180348 0.2231906 0.02439174 0.001248185 0.03372713
## 63     63 0.03180015 0.2233398 0.02439505 0.001238224 0.03385516
## 64     64 0.03180046 0.2233745 0.02439917 0.001244233 0.03405049
## 65     65 0.03180000 0.2234053 0.02440543 0.001248680 0.03433253
## 66     66 0.03180270 0.2233028 0.02439950 0.001254869 0.03417247
## 67     67 0.03181633 0.2227091 0.02441224 0.001251726 0.03413578
## 68     68 0.03181936 0.2225993 0.02441325 0.001250859 0.03412458
## 69     69 0.03182375 0.2224462 0.02441914 0.001259905 0.03405835
## 70     70 0.03183604 0.2219484 0.02442851 0.001267463 0.03432957
## 71     71 0.03184685 0.2215004 0.02443486 0.001274459 0.03479558
## 72     72 0.03184415 0.2216532 0.02442953 0.001282772 0.03487233
## 73     73 0.03185751 0.2210988 0.02443067 0.001276008 0.03442090
## 74     74 0.03186715 0.2206976 0.02443227 0.001282032 0.03437472
## 75     75 0.03188430 0.2200701 0.02444015 0.001285668 0.03455568
## 76     76 0.03188808 0.2199160 0.02444334 0.001291573 0.03525693
## 77     77 0.03189181 0.2197510 0.02444170 0.001284723 0.03488353
## 78     78 0.03189777 0.2195366 0.02444854 0.001293016 0.03503956
## 79     79 0.03189967 0.2194738 0.02444698 0.001296734 0.03564542
## 80     80 0.03189907 0.2195002 0.02444877 0.001303310 0.03602702
## 81     81 0.03190613 0.2191910 0.02445201 0.001309314 0.03587062
## 82     82 0.03190860 0.2190923 0.02445369 0.001315894 0.03624115
## 83     83 0.03192215 0.2184940 0.02446296 0.001311997 0.03612531
## 84     84 0.03193285 0.2180714 0.02447245 0.001306013 0.03622668
## 85     85 0.03194105 0.2177261 0.02447804 0.001307065 0.03537147
## 86     86 0.03194274 0.2177176 0.02448030 0.001314952 0.03592025
## 87     87 0.03194347 0.2176956 0.02447783 0.001322342 0.03530341
## 88     88 0.03194072 0.2177931 0.02447823 0.001326345 0.03502022
## 89     89 0.03194217 0.2177190 0.02447888 0.001324128 0.03479041
## 90     90 0.03194800 0.2175016 0.02448487 0.001318682 0.03441793
## 91     91 0.03196745 0.2166947 0.02450197 0.001312647 0.03426601
## 92     92 0.03197448 0.2163695 0.02451054 0.001297169 0.03404281
## 93     93 0.03197592 0.2163209 0.02451199 0.001294874 0.03418905
## 94     94 0.03197580 0.2163211 0.02450991 0.001287320 0.03391235
## 95     95 0.03198411 0.2159878 0.02451359 0.001284361 0.03410092
## 96     96 0.03199084 0.2156974 0.02452408 0.001290978 0.03459219
## 97     97 0.03199287 0.2156395 0.02452594 0.001292995 0.03477461
## 98     98 0.03198919 0.2158053 0.02452682 0.001296272 0.03473173
## 99     99 0.03199240 0.2156838 0.02453536 0.001303586 0.03519563
## 100   100 0.03200285 0.2152293 0.02454134 0.001305533 0.03522416
## 101   101 0.03200956 0.2149447 0.02454319 0.001303332 0.03511577
## 102   102 0.03201308 0.2148190 0.02453774 0.001297629 0.03518627
## 103   103 0.03201909 0.2145622 0.02454355 0.001294385 0.03509791
## 104   104 0.03202372 0.2143867 0.02454610 0.001298717 0.03509227
## 105   105 0.03203180 0.2140950 0.02455588 0.001301983 0.03539245
## 106   106 0.03202804 0.2142922 0.02454865 0.001293210 0.03497080
## 107   107 0.03203281 0.2140969 0.02454999 0.001296947 0.03492842
## 108   108 0.03203071 0.2142001 0.02454627 0.001290221 0.03506410
## 109   109 0.03203716 0.2138914 0.02454726 0.001289657 0.03499032
## 110   110 0.03204114 0.2137412 0.02454974 0.001294219 0.03546963
## 111   111 0.03204373 0.2136714 0.02455211 0.001289848 0.03573499
## 112   112 0.03204648 0.2135329 0.02456076 0.001286808 0.03538721
## 113   113 0.03205102 0.2133173 0.02456334 0.001285396 0.03516566
## 114   114 0.03205172 0.2132993 0.02456781 0.001289080 0.03567249
## 115   115 0.03205048 0.2133464 0.02457020 0.001290723 0.03570877
## 116   116 0.03205288 0.2132488 0.02457361 0.001295002 0.03598700
## 117   117 0.03205349 0.2132344 0.02457016 0.001297979 0.03619134
## 118   118 0.03206000 0.2129626 0.02457734 0.001292426 0.03595361
## 119   119 0.03205997 0.2130215 0.02457691 0.001297012 0.03588044
## 120   120 0.03205720 0.2131213 0.02457798 0.001291100 0.03575223
## 121   121 0.03206049 0.2129782 0.02457954 0.001294753 0.03564690
## 122   122 0.03205537 0.2131966 0.02457688 0.001292700 0.03572104
## 123   123 0.03205018 0.2134072 0.02457110 0.001292495 0.03559186
## 124   124 0.03205230 0.2133271 0.02457061 0.001291397 0.03600498
## 125   125 0.03205317 0.2133024 0.02457503 0.001294122 0.03584794
## 126   126 0.03205688 0.2131481 0.02457773 0.001286281 0.03518446
## 127   127 0.03206394 0.2128800 0.02458483 0.001291034 0.03534356
## 128   128 0.03206445 0.2128616 0.02458617 0.001295452 0.03561749
## 129   129 0.03206485 0.2128615 0.02458278 0.001293259 0.03570022
## 130   130 0.03206254 0.2129626 0.02457885 0.001290145 0.03573342
## 131   131 0.03206599 0.2128229 0.02457631 0.001286842 0.03551669
## 132   132 0.03206868 0.2127159 0.02457568 0.001281924 0.03517439
## 133   133 0.03207054 0.2126350 0.02457671 0.001285024 0.03539529
## 134   134 0.03207993 0.2122549 0.02458165 0.001281472 0.03536755
## 135   135 0.03208622 0.2120147 0.02458452 0.001280873 0.03542328
## 136   136 0.03208691 0.2119837 0.02458758 0.001288180 0.03536881
## 137   137 0.03208793 0.2119609 0.02458810 0.001281056 0.03523697
## 138   138 0.03208770 0.2119476 0.02458556 0.001281579 0.03502181
## 139   139 0.03209031 0.2118627 0.02458466 0.001286842 0.03524399
## 140   140 0.03209684 0.2116081 0.02458617 0.001288583 0.03540719
## 141   141 0.03209690 0.2116210 0.02458571 0.001289570 0.03537980
## 142   142 0.03209276 0.2118156 0.02458429 0.001290401 0.03541254
## 143   143 0.03209517 0.2117197 0.02458865 0.001291343 0.03549661
## 144   144 0.03209807 0.2115875 0.02459055 0.001288361 0.03531611
## 145   145 0.03209784 0.2116089 0.02459495 0.001288821 0.03551577
## 146   146 0.03210431 0.2113525 0.02459901 0.001298219 0.03589495
## 147   147 0.03210117 0.2115278 0.02459567 0.001303056 0.03617499
## 148   148 0.03210506 0.2113482 0.02459959 0.001301975 0.03625020
## 149   149 0.03210440 0.2113474 0.02459648 0.001300201 0.03602724
## 150   150 0.03210852 0.2111787 0.02460277 0.001301674 0.03598713
## 151   151 0.03211207 0.2110492 0.02460193 0.001300257 0.03600214
## 152   152 0.03211558 0.2109033 0.02460626 0.001292421 0.03589180
## 153   153 0.03211380 0.2110177 0.02460483 0.001292888 0.03602834
## 154   154 0.03211575 0.2109573 0.02460651 0.001292036 0.03606978
## 155   155 0.03211725 0.2109127 0.02460742 0.001289106 0.03599411
## 156   156 0.03212090 0.2107530 0.02460703 0.001289845 0.03611280
## 157   157 0.03212373 0.2106263 0.02460722 0.001284561 0.03585196
## 158   158 0.03212237 0.2106937 0.02460369 0.001285987 0.03589255
## 159   159 0.03211950 0.2108360 0.02459897 0.001287666 0.03597091
## 160   160 0.03211961 0.2108222 0.02460055 0.001282810 0.03585323
## 161   161 0.03211664 0.2109758 0.02459861 0.001284967 0.03578592
## 162   162 0.03211366 0.2111081 0.02459441 0.001281279 0.03585356
## 163   163 0.03211330 0.2111234 0.02459407 0.001275677 0.03571496
## 164   164 0.03211688 0.2109803 0.02459888 0.001271856 0.03571024
## 165   165 0.03211907 0.2108739 0.02460080 0.001268669 0.03553504
## 166   166 0.03212236 0.2107574 0.02460119 0.001271389 0.03571341
## 167   167 0.03211847 0.2109413 0.02459886 0.001275001 0.03585912
## 168   168 0.03211800 0.2109740 0.02459884 0.001276715 0.03613377
## 169   169 0.03212080 0.2108556 0.02460193 0.001276902 0.03625075
## 170   170 0.03211781 0.2109893 0.02459860 0.001278605 0.03631152
## 171   171 0.03211734 0.2110298 0.02459728 0.001283164 0.03652415
## 172   172 0.03212027 0.2108862 0.02460041 0.001277846 0.03631532
## 173   173 0.03212335 0.2107728 0.02460439 0.001276645 0.03623717
## 174   174 0.03212448 0.2107218 0.02460515 0.001280037 0.03620249
## 175   175 0.03212802 0.2105988 0.02460932 0.001283903 0.03634322
## 176   176 0.03212401 0.2107764 0.02460402 0.001282797 0.03617567
## 177   177 0.03212483 0.2107339 0.02460525 0.001283613 0.03604867
## 178   178 0.03212353 0.2107950 0.02460550 0.001285138 0.03618274
## 179   179 0.03211886 0.2109922 0.02460375 0.001281894 0.03624237
## 180   180 0.03211934 0.2109706 0.02460538 0.001278688 0.03620059
## 181   181 0.03211670 0.2110887 0.02460443 0.001279549 0.03624950
## 182   182 0.03211641 0.2111005 0.02460419 0.001279913 0.03625584
## 183   183 0.03211466 0.2112043 0.02460449 0.001283118 0.03642680
## 184   184 0.03211480 0.2112210 0.02460821 0.001283509 0.03659481
## 185   185 0.03211289 0.2112854 0.02460811 0.001283448 0.03658422
## 186   186 0.03210934 0.2114380 0.02460548 0.001282811 0.03657851
## 187   187 0.03210792 0.2115001 0.02460412 0.001278320 0.03662367
## 188   188 0.03210691 0.2115517 0.02460385 0.001277447 0.03669301
## 189   189 0.03210871 0.2114972 0.02460355 0.001278651 0.03677788
## 190   190 0.03210751 0.2115394 0.02460230 0.001278425 0.03675082
## 191   191 0.03210748 0.2115358 0.02460312 0.001275565 0.03654684
## 192   192 0.03211172 0.2113676 0.02460725 0.001277297 0.03652225
## 193   193 0.03211523 0.2112143 0.02461184 0.001274596 0.03645782
## 194   194 0.03211669 0.2111593 0.02461317 0.001274349 0.03644554
## 195   195 0.03211394 0.2112754 0.02461286 0.001275890 0.03651445
## 196   196 0.03211460 0.2112429 0.02461446 0.001272721 0.03645428
## 197   197 0.03211448 0.2112530 0.02461544 0.001273983 0.03644215
## 198   198 0.03211460 0.2112664 0.02461689 0.001274038 0.03640015
## 199   199 0.03211280 0.2113385 0.02461597 0.001275285 0.03642839
## 200   200 0.03211056 0.2114314 0.02461493 0.001272104 0.03628747
## 201   201 0.03211157 0.2113938 0.02461733 0.001272659 0.03627531
## 202   202 0.03211234 0.2113689 0.02461790 0.001276421 0.03638751
## 203   203 0.03211040 0.2114433 0.02461549 0.001276149 0.03650638
## 204   204 0.03211078 0.2114262 0.02461610 0.001275630 0.03650562
## 205   205 0.03210932 0.2114865 0.02461411 0.001276812 0.03654330
## 206   206 0.03210991 0.2114623 0.02461523 0.001276235 0.03649789
## 207   207 0.03211093 0.2114160 0.02461563 0.001278033 0.03650937
## 208   208 0.03211307 0.2113280 0.02461641 0.001277289 0.03651890
## 209   209 0.03211398 0.2112908 0.02461888 0.001275604 0.03640314
## 210   210 0.03211373 0.2112945 0.02461745 0.001272890 0.03631021
## 211   211 0.03211285 0.2113385 0.02461592 0.001273426 0.03637260
## 212   212 0.03211224 0.2113639 0.02461477 0.001273669 0.03630954
## 213   213 0.03211188 0.2113791 0.02461543 0.001272946 0.03635200
## 214   214 0.03211201 0.2113709 0.02461609 0.001274874 0.03638781
## 215   215 0.03211064 0.2114320 0.02461426 0.001275398 0.03637071
## 216   216 0.03211167 0.2113819 0.02461471 0.001273967 0.03630651
## 217   217 0.03211209 0.2113648 0.02461497 0.001274126 0.03628175
## 218   218 0.03211249 0.2113505 0.02461523 0.001274541 0.03632590
## 219   219 0.03211227 0.2113587 0.02461507 0.001273909 0.03632334
## 220   220 0.03211320 0.2113102 0.02461598 0.001273784 0.03628852
## 221   221 0.03211368 0.2112935 0.02461591 0.001275246 0.03632758
## 222   222 0.03211446 0.2112620 0.02461723 0.001276137 0.03637547
## 223   223 0.03211577 0.2112041 0.02461773 0.001275653 0.03636885
## 224   224 0.03211624 0.2111851 0.02461784 0.001275289 0.03637780
## 225   225 0.03211608 0.2111942 0.02461824 0.001275950 0.03639167
## 226   226 0.03211631 0.2111844 0.02461856 0.001275566 0.03635043
## 227   227 0.03211699 0.2111581 0.02461883 0.001275106 0.03636560
## 228   228 0.03211754 0.2111360 0.02461884 0.001275122 0.03634111
## 229   229 0.03211846 0.2110916 0.02461944 0.001274756 0.03633198
## 230   230 0.03211878 0.2110758 0.02461948 0.001274633 0.03632161
## 231   231 0.03211911 0.2110609 0.02461979 0.001274575 0.03631451
## 232   232 0.03211888 0.2110705 0.02461982 0.001275218 0.03631995
## 233   233 0.03211883 0.2110720 0.02461967 0.001275246 0.03630390
## 234   234 0.03211810 0.2111045 0.02461931 0.001275721 0.03632636
## 235   235 0.03211787 0.2111158 0.02461909 0.001276226 0.03634494
## 236   236 0.03211763 0.2111270 0.02461898 0.001276244 0.03635228
## 237   237 0.03211759 0.2111286 0.02461883 0.001276272 0.03634773
## 238   238 0.03211757 0.2111288 0.02461888 0.001276138 0.03634485
## 239   239 0.03211759 0.2111282 0.02461893 0.001276164 0.03634986
## 240   240 0.03211761 0.2111276 0.02461892 0.001276161 0.03635025
##            MAESD
## 1   0.0007738764
## 2   0.0006800497
## 3   0.0006076308
## 4   0.0006490686
## 5   0.0006124246
## 6   0.0005400859
## 7   0.0005590591
## 8   0.0005646895
## 9   0.0005887279
## 10  0.0006101115
## 11  0.0006157131
## 12  0.0006423545
## 13  0.0006439677
## 14  0.0006360397
## 15  0.0006247845
## 16  0.0006443208
## 17  0.0006440588
## 18  0.0006636083
## 19  0.0006717744
## 20  0.0006540925
## 21  0.0006552731
## 22  0.0006600059
## 23  0.0006545425
## 24  0.0006442102
## 25  0.0006409987
## 26  0.0006501888
## 27  0.0006460050
## 28  0.0006322024
## 29  0.0006367633
## 30  0.0006382240
## 31  0.0006346605
## 32  0.0006431926
## 33  0.0006503080
## 34  0.0006360372
## 35  0.0006256907
## 36  0.0006020870
## 37  0.0006051330
## 38  0.0006058768
## 39  0.0005995760
## 40  0.0005977160
## 41  0.0006051742
## 42  0.0006132250
## 43  0.0006163476
## 44  0.0006200530
## 45  0.0006207565
## 46  0.0006096226
## 47  0.0006060211
## 48  0.0005923208
## 49  0.0005826308
## 50  0.0006008142
## 51  0.0006139875
## 52  0.0006206735
## 53  0.0006228179
## 54  0.0006263955
## 55  0.0006386607
## 56  0.0006419906
## 57  0.0006528991
## 58  0.0006585933
## 59  0.0006513759
## 60  0.0006636015
## 61  0.0006627526
## 62  0.0006592102
## 63  0.0006487573
## 64  0.0006461781
## 65  0.0006524623
## 66  0.0006534325
## 67  0.0006414960
## 68  0.0006377803
## 69  0.0006476654
## 70  0.0006562368
## 71  0.0006570293
## 72  0.0006586343
## 73  0.0006545822
## 74  0.0006604541
## 75  0.0006635751
## 76  0.0006619200
## 77  0.0006605706
## 78  0.0006638180
## 79  0.0006622449
## 80  0.0006626720
## 81  0.0006759645
## 82  0.0006836056
## 83  0.0006786323
## 84  0.0006728712
## 85  0.0006803436
## 86  0.0006873353
## 87  0.0006930569
## 88  0.0006924919
## 89  0.0006823691
## 90  0.0006664005
## 91  0.0006597366
## 92  0.0006521399
## 93  0.0006501486
## 94  0.0006387888
## 95  0.0006369111
## 96  0.0006381132
## 97  0.0006402372
## 98  0.0006420527
## 99  0.0006471567
## 100 0.0006450514
## 101 0.0006445603
## 102 0.0006383039
## 103 0.0006287104
## 104 0.0006347915
## 105 0.0006338678
## 106 0.0006234954
## 107 0.0006238803
## 108 0.0006231627
## 109 0.0006200587
## 110 0.0006284415
## 111 0.0006225923
## 112 0.0006233987
## 113 0.0006276339
## 114 0.0006309570
## 115 0.0006324000
## 116 0.0006357294
## 117 0.0006440434
## 118 0.0006394661
## 119 0.0006391256
## 120 0.0006336310
## 121 0.0006371450
## 122 0.0006316621
## 123 0.0006272539
## 124 0.0006272481
## 125 0.0006261343
## 126 0.0006241280
## 127 0.0006322349
## 128 0.0006332429
## 129 0.0006359529
## 130 0.0006339938
## 131 0.0006267091
## 132 0.0006195183
## 133 0.0006199606
## 134 0.0006142189
## 135 0.0006125138
## 136 0.0006180653
## 137 0.0006087392
## 138 0.0006061207
## 139 0.0006128750
## 140 0.0006153728
## 141 0.0006181649
## 142 0.0006208905
## 143 0.0006250078
## 144 0.0006220642
## 145 0.0006200517
## 146 0.0006250022
## 147 0.0006300755
## 148 0.0006296502
## 149 0.0006327868
## 150 0.0006322606
## 151 0.0006337871
## 152 0.0006308611
## 153 0.0006295410
## 154 0.0006276022
## 155 0.0006282198
## 156 0.0006298375
## 157 0.0006257675
## 158 0.0006277134
## 159 0.0006296842
## 160 0.0006273123
## 161 0.0006313019
## 162 0.0006281586
## 163 0.0006246241
## 164 0.0006178806
## 165 0.0006174916
## 166 0.0006176135
## 167 0.0006212200
## 168 0.0006232316
## 169 0.0006236766
## 170 0.0006247309
## 171 0.0006279044
## 172 0.0006201104
## 173 0.0006189479
## 174 0.0006213503
## 175 0.0006253427
## 176 0.0006201353
## 177 0.0006238007
## 178 0.0006261481
## 179 0.0006235200
## 180 0.0006194299
## 181 0.0006170560
## 182 0.0006201539
## 183 0.0006207200
## 184 0.0006206465
## 185 0.0006198265
## 186 0.0006217939
## 187 0.0006176807
## 188 0.0006152187
## 189 0.0006168947
## 190 0.0006168908
## 191 0.0006157917
## 192 0.0006159418
## 193 0.0006143245
## 194 0.0006150036
## 195 0.0006154492
## 196 0.0006142987
## 197 0.0006152132
## 198 0.0006113502
## 199 0.0006108036
## 200 0.0006082577
## 201 0.0006085016
## 202 0.0006121678
## 203 0.0006118951
## 204 0.0006129577
## 205 0.0006134732
## 206 0.0006128079
## 207 0.0006134981
## 208 0.0006133182
## 209 0.0006120479
## 210 0.0006086570
## 211 0.0006092435
## 212 0.0006093801
## 213 0.0006084648
## 214 0.0006088718
## 215 0.0006099132
## 216 0.0006093848
## 217 0.0006088952
## 218 0.0006097264
## 219 0.0006088481
## 220 0.0006092012
## 221 0.0006100405
## 222 0.0006108499
## 223 0.0006102335
## 224 0.0006098857
## 225 0.0006105760
## 226 0.0006092498
## 227 0.0006080224
## 228 0.0006079035
## 229 0.0006076066
## 230 0.0006072950
## 231 0.0006073377
## 232 0.0006076399
## 233 0.0006076180
## 234 0.0006080520
## 235 0.0006083319
## 236 0.0006083134
## 237 0.0006082731
## 238 0.0006084270
## 239 0.0006084667
## 240 0.0006084272
## [1] "Best Model"
##   nvmax
## 8     8

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients of final model:"
##                  Estimate         2.5 %        97.5 %
## (Intercept)  2.000745e+00  1.9944123678  2.007078e+00
## x4          -5.339736e-05 -0.0000708296 -3.596511e-05
## x7           1.126502e-02  0.0100390043  1.249104e-02
## x9           3.419457e-03  0.0027827248  4.056190e-03
## x16          8.893311e-04  0.0004770930  1.301569e-03
## x17          1.569187e-03  0.0009371998  2.201173e-03
## stat98       3.679003e-03  0.0032090828  4.148922e-03
## stat110     -3.396195e-03 -0.0038709613 -2.921428e-03
## sqrt.x18     2.634127e-02  0.0245190385  2.816351e-02

Test

# Evaluate the CV-selected backward-elimination model (fit on the full
# training set) against the hold-out test set.
# `algo.backward.caret` is a logical flag from params; comparing with
# `== TRUE` is redundant and NA-unsafe in `if`, so test the flag directly.
if (algo.backward.caret) {
  test.model(model.backward, data.test
             ,method = 'leapBackward', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             # NOTE(review): `t` resolves to base R's transpose function unless
             # a transformation object named `t` is defined upstream -- confirm
             # this is the intended transformation argument.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.042   2.086   2.098   2.098   2.111   2.145 
## [1] "leapBackward  Test MSE: 0.00104706068550194"

Backward Elimination with CV (w/ filtered train)

Train

# Train backward elimination with cross-validation (caret, method
# "leapBackward") on the filtered training set `data.train2`.
# Testing the logical flag directly instead of `== TRUE` (redundant,
# NA-unsafe inside `if`).
if (algo.backward.caret) {
  # Fixed seed so caret's CV fold assignment is reproducible.
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data = data.train2
                                   ,method = "leapBackward"
                                   ,feature.names = feature.names)
  # Keep the fitted model and the row/feature id returned by the helper;
  # both are reused by the subsequent test.model() chunk.
  model.backward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
## [1] "All models results"
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.02870415 0.1530762 0.02317988 0.0006951382 0.01549603
## 2       2 0.02757227 0.2187844 0.02242433 0.0007527183 0.01820421
## 3       3 0.02705829 0.2479533 0.02192760 0.0008285373 0.02374708
## 4       4 0.02625232 0.2917906 0.02107256 0.0007462016 0.02401253
## 5       5 0.02588014 0.3117589 0.02079034 0.0008063445 0.03139083
## 6       6 0.02571972 0.3202667 0.02066781 0.0008228725 0.03226738
## 7       7 0.02565726 0.3235663 0.02065431 0.0007901694 0.03048056
## 8       8 0.02555620 0.3288700 0.02061242 0.0008109593 0.03123911
## 9       9 0.02555436 0.3290776 0.02063900 0.0008119461 0.03171220
## 10     10 0.02551324 0.3312508 0.02062474 0.0008234722 0.03246361
## 11     11 0.02539411 0.3374719 0.02053013 0.0008413873 0.03325789
## 12     12 0.02539798 0.3373039 0.02050649 0.0008320889 0.03222254
## 13     13 0.02538292 0.3380596 0.02049708 0.0008467126 0.03340128
## 14     14 0.02542575 0.3358779 0.02052054 0.0008402712 0.03180199
## 15     15 0.02543250 0.3355319 0.02052936 0.0008037380 0.02999873
## 16     16 0.02545470 0.3343357 0.02056416 0.0007952837 0.03013225
## 17     17 0.02545654 0.3342588 0.02057957 0.0007956435 0.03035087
## 18     18 0.02545391 0.3343794 0.02057685 0.0007859445 0.02947075
## 19     19 0.02545122 0.3345469 0.02058123 0.0008162614 0.03082608
## 20     20 0.02546820 0.3337167 0.02059368 0.0007998900 0.02942320
## 21     21 0.02550194 0.3320462 0.02060998 0.0007885604 0.02839988
## 22     22 0.02552447 0.3309719 0.02062632 0.0007743546 0.02762172
## 23     23 0.02552634 0.3308952 0.02062798 0.0007694063 0.02726057
## 24     24 0.02553502 0.3305086 0.02063531 0.0007736505 0.02707113
## 25     25 0.02552533 0.3309980 0.02061974 0.0007779228 0.02746531
## 26     26 0.02553834 0.3303321 0.02062741 0.0007639262 0.02709192
## 27     27 0.02554942 0.3298179 0.02063532 0.0007679690 0.02693828
## 28     28 0.02555404 0.3296132 0.02064477 0.0007663203 0.02575848
## 29     29 0.02556286 0.3292215 0.02066119 0.0007470960 0.02468626
## 30     30 0.02557289 0.3287608 0.02065684 0.0007528794 0.02471107
## 31     31 0.02557913 0.3284744 0.02066263 0.0007546137 0.02540329
## 32     32 0.02557143 0.3289600 0.02065721 0.0007440628 0.02502614
## 33     33 0.02556874 0.3291899 0.02065436 0.0007389325 0.02451529
## 34     34 0.02556716 0.3292914 0.02064183 0.0007344734 0.02468540
## 35     35 0.02556268 0.3295293 0.02063034 0.0007143900 0.02403278
## 36     36 0.02555657 0.3298343 0.02063137 0.0007010408 0.02405153
## 37     37 0.02555811 0.3298564 0.02062885 0.0007046262 0.02404771
## 38     38 0.02556146 0.3297122 0.02062592 0.0007303641 0.02459030
## 39     39 0.02555824 0.3299086 0.02063027 0.0007371200 0.02516689
## 40     40 0.02554893 0.3304247 0.02061362 0.0007274153 0.02507306
## 41     41 0.02552466 0.3316874 0.02059878 0.0007100448 0.02506924
## 42     42 0.02550789 0.3325643 0.02057645 0.0007172860 0.02511443
## 43     43 0.02551445 0.3322525 0.02058675 0.0007195394 0.02474627
## 44     44 0.02551015 0.3325033 0.02058479 0.0007341002 0.02511012
## 45     45 0.02549363 0.3333742 0.02057402 0.0007458980 0.02585765
## 46     46 0.02547569 0.3343098 0.02057038 0.0007385568 0.02492109
## 47     47 0.02546548 0.3348126 0.02056361 0.0007442004 0.02536786
## 48     48 0.02545969 0.3351072 0.02056071 0.0007501756 0.02491756
## 49     49 0.02547272 0.3345110 0.02056599 0.0007530545 0.02502326
## 50     50 0.02546975 0.3346622 0.02056458 0.0007401959 0.02486179
## 51     51 0.02547675 0.3343356 0.02057706 0.0007336209 0.02459170
## 52     52 0.02546919 0.3347167 0.02057239 0.0007400188 0.02522930
## 53     53 0.02546964 0.3346724 0.02057358 0.0007365887 0.02530956
## 54     54 0.02546978 0.3347259 0.02057537 0.0007307085 0.02520264
## 55     55 0.02547387 0.3345405 0.02057245 0.0007480616 0.02546730
## 56     56 0.02548356 0.3340756 0.02058736 0.0007399345 0.02511968
## 57     57 0.02547531 0.3344755 0.02057428 0.0007407648 0.02486420
## 58     58 0.02548259 0.3341192 0.02057490 0.0007186693 0.02390972
## 59     59 0.02548461 0.3340371 0.02057111 0.0007249521 0.02415666
## 60     60 0.02549126 0.3337155 0.02057493 0.0007299292 0.02455878
## 61     61 0.02550074 0.3332527 0.02058123 0.0007220950 0.02470126
## 62     62 0.02550829 0.3329440 0.02058783 0.0007320370 0.02506377
## 63     63 0.02550417 0.3331772 0.02058879 0.0007280051 0.02496801
## 64     64 0.02549640 0.3336123 0.02057608 0.0007306986 0.02559537
## 65     65 0.02548848 0.3340374 0.02056792 0.0007152400 0.02541104
## 66     66 0.02548806 0.3340704 0.02056454 0.0007083862 0.02481703
## 67     67 0.02548634 0.3342137 0.02055684 0.0006972006 0.02457891
## 68     68 0.02547837 0.3345990 0.02054772 0.0006994289 0.02482702
## 69     69 0.02547992 0.3344998 0.02054904 0.0006942295 0.02475370
## 70     70 0.02549030 0.3339911 0.02055620 0.0006942516 0.02473423
## 71     71 0.02548858 0.3340725 0.02055417 0.0006943934 0.02471408
## 72     72 0.02548828 0.3340937 0.02055407 0.0006992519 0.02482278
## 73     73 0.02548965 0.3340611 0.02054987 0.0007029571 0.02482409
## 74     74 0.02548532 0.3343505 0.02054231 0.0007005656 0.02476650
## 75     75 0.02548307 0.3344668 0.02054545 0.0007004473 0.02518638
## 76     76 0.02548505 0.3343636 0.02054781 0.0007018001 0.02502601
## 77     77 0.02549832 0.3337254 0.02056006 0.0007042600 0.02498685
## 78     78 0.02551170 0.3330569 0.02057090 0.0007055736 0.02499928
## 79     79 0.02551308 0.3329824 0.02057377 0.0006856532 0.02437022
## 80     80 0.02551646 0.3328281 0.02057245 0.0006873119 0.02458842
## 81     81 0.02551899 0.3327055 0.02057691 0.0006840386 0.02487086
## 82     82 0.02551418 0.3328928 0.02057215 0.0006723072 0.02435066
## 83     83 0.02551718 0.3327651 0.02057686 0.0006699851 0.02416045
## 84     84 0.02551365 0.3329606 0.02057239 0.0006686242 0.02375084
## 85     85 0.02550842 0.3332108 0.02057004 0.0006642085 0.02367576
## 86     86 0.02550841 0.3332225 0.02056801 0.0006702436 0.02349518
## 87     87 0.02550744 0.3333112 0.02056187 0.0006713730 0.02323578
## 88     88 0.02550690 0.3333467 0.02056655 0.0006686671 0.02311413
## 89     89 0.02551177 0.3331465 0.02057750 0.0006756879 0.02328940
## 90     90 0.02551251 0.3331470 0.02058365 0.0006879538 0.02372625
## 91     91 0.02551963 0.3328417 0.02059061 0.0006801753 0.02346919
## 92     92 0.02552148 0.3327766 0.02058720 0.0006782238 0.02322769
## 93     93 0.02552602 0.3325618 0.02059594 0.0006894387 0.02346268
## 94     94 0.02552709 0.3325003 0.02058840 0.0006908346 0.02360486
## 95     95 0.02552978 0.3323873 0.02059360 0.0006855826 0.02341861
## 96     96 0.02553534 0.3320968 0.02059274 0.0006887643 0.02339826
## 97     97 0.02553666 0.3320326 0.02059519 0.0006930233 0.02422395
## 98     98 0.02553376 0.3321660 0.02059509 0.0006902401 0.02411403
## 99     99 0.02553675 0.3320115 0.02059568 0.0006859523 0.02407444
## 100   100 0.02554673 0.3315267 0.02060573 0.0006874175 0.02424392
## 101   101 0.02554744 0.3315171 0.02060718 0.0006827710 0.02441354
## 102   102 0.02554768 0.3315320 0.02060962 0.0006904362 0.02458161
## 103   103 0.02555305 0.3313079 0.02061416 0.0006963031 0.02481470
## 104   104 0.02556295 0.3308943 0.02061863 0.0007033687 0.02512937
## 105   105 0.02555161 0.3314676 0.02060876 0.0006977911 0.02487507
## 106   106 0.02555522 0.3312911 0.02061317 0.0006941869 0.02486316
## 107   107 0.02556240 0.3309464 0.02061843 0.0006935792 0.02484722
## 108   108 0.02555624 0.3312439 0.02061177 0.0006900255 0.02483631
## 109   109 0.02555462 0.3313343 0.02060816 0.0006898007 0.02465502
## 110   110 0.02556296 0.3309486 0.02061704 0.0006876287 0.02471742
## 111   111 0.02556423 0.3308860 0.02061580 0.0006955943 0.02519048
## 112   112 0.02555757 0.3312083 0.02060365 0.0006972784 0.02534343
## 113   113 0.02555211 0.3314880 0.02059674 0.0007015971 0.02553802
## 114   114 0.02554885 0.3316572 0.02059696 0.0006924715 0.02504299
## 115   115 0.02555780 0.3312362 0.02060305 0.0006874995 0.02479997
## 116   116 0.02555659 0.3313095 0.02060089 0.0006848861 0.02474092
## 117   117 0.02554755 0.3317286 0.02059355 0.0006823588 0.02459033
## 118   118 0.02554297 0.3319780 0.02059609 0.0006839887 0.02446509
## 119   119 0.02554151 0.3320814 0.02059368 0.0006856921 0.02456839
## 120   120 0.02554902 0.3316975 0.02059467 0.0006848968 0.02431189
## 121   121 0.02554640 0.3318284 0.02060014 0.0006849769 0.02430354
## 122   122 0.02554276 0.3320412 0.02060026 0.0006922843 0.02453696
## 123   123 0.02553461 0.3324272 0.02059095 0.0006932616 0.02481188
## 124   124 0.02553810 0.3322421 0.02059342 0.0006810968 0.02453236
## 125   125 0.02553916 0.3322189 0.02059217 0.0006848526 0.02467833
## 126   126 0.02554181 0.3320878 0.02059583 0.0006887770 0.02494491
## 127   127 0.02554621 0.3318988 0.02060051 0.0006808466 0.02448502
## 128   128 0.02555091 0.3316811 0.02060691 0.0006809580 0.02454894
## 129   129 0.02554862 0.3318127 0.02060191 0.0006824302 0.02454872
## 130   130 0.02554822 0.3318166 0.02060420 0.0006868540 0.02450763
## 131   131 0.02555242 0.3315744 0.02060830 0.0006845499 0.02458928
## 132   132 0.02555433 0.3314702 0.02061242 0.0006743914 0.02445474
## 133   133 0.02555652 0.3313615 0.02061418 0.0006657314 0.02407742
## 134   134 0.02555809 0.3313177 0.02061933 0.0006714024 0.02407970
## 135   135 0.02555349 0.3315341 0.02061678 0.0006711069 0.02426132
## 136   136 0.02555891 0.3313102 0.02062095 0.0006739282 0.02451748
## 137   137 0.02556703 0.3309359 0.02062891 0.0006722356 0.02462246
## 138   138 0.02556339 0.3311297 0.02062523 0.0006662754 0.02432837
## 139   139 0.02557230 0.3307233 0.02063486 0.0006605023 0.02415675
## 140   140 0.02557590 0.3305505 0.02063710 0.0006570580 0.02417569
## 141   141 0.02557116 0.3307984 0.02063456 0.0006595728 0.02429729
## 142   142 0.02556862 0.3309389 0.02063676 0.0006610024 0.02442110
## 143   143 0.02556752 0.3310217 0.02063686 0.0006637238 0.02421016
## 144   144 0.02557011 0.3308943 0.02063808 0.0006655253 0.02436967
## 145   145 0.02557048 0.3308788 0.02063852 0.0006662378 0.02462039
## 146   146 0.02557196 0.3308144 0.02063848 0.0006669475 0.02493017
## 147   147 0.02557117 0.3308348 0.02063473 0.0006659917 0.02509881
## 148   148 0.02556967 0.3309188 0.02063782 0.0006727799 0.02522881
## 149   149 0.02557371 0.3307266 0.02064064 0.0006747691 0.02549537
## 150   150 0.02557300 0.3307718 0.02063891 0.0006761412 0.02561376
## 151   151 0.02557523 0.3306564 0.02063875 0.0006767199 0.02586142
## 152   152 0.02557790 0.3305492 0.02064263 0.0006773456 0.02571972
## 153   153 0.02557213 0.3307911 0.02063785 0.0006743838 0.02584091
## 154   154 0.02557445 0.3306591 0.02064309 0.0006729071 0.02618493
## 155   155 0.02557440 0.3306513 0.02063903 0.0006719194 0.02642174
## 156   156 0.02557268 0.3307423 0.02064160 0.0006712548 0.02630995
## 157   157 0.02557432 0.3306842 0.02064197 0.0006720355 0.02616926
## 158   158 0.02557227 0.3307905 0.02064172 0.0006712965 0.02591016
## 159   159 0.02557617 0.3305988 0.02064417 0.0006675836 0.02597471
## 160   160 0.02557912 0.3304423 0.02064580 0.0006667461 0.02570276
## 161   161 0.02558106 0.3303532 0.02064658 0.0006662316 0.02560548
## 162   162 0.02558244 0.3302858 0.02064643 0.0006692442 0.02579840
## 163   163 0.02558344 0.3302408 0.02064687 0.0006764787 0.02601294
## 164   164 0.02558477 0.3301942 0.02064888 0.0006796621 0.02620297
## 165   165 0.02558352 0.3302479 0.02064800 0.0006820120 0.02632589
## 166   166 0.02558551 0.3301585 0.02064835 0.0006800315 0.02602624
## 167   167 0.02558693 0.3300810 0.02064946 0.0006800974 0.02597692
## 168   168 0.02559155 0.3298556 0.02065202 0.0006747478 0.02563419
## 169   169 0.02559414 0.3297256 0.02065447 0.0006744566 0.02572590
## 170   170 0.02559379 0.3297610 0.02065428 0.0006728460 0.02566729
## 171   171 0.02559361 0.3297753 0.02065487 0.0006732009 0.02570677
## 172   172 0.02559618 0.3296702 0.02065651 0.0006708888 0.02545613
## 173   173 0.02559574 0.3296898 0.02065688 0.0006679298 0.02534206
## 174   174 0.02559073 0.3299309 0.02065185 0.0006675536 0.02536126
## 175   175 0.02559707 0.3296242 0.02065524 0.0006661348 0.02545149
## 176   176 0.02559510 0.3297326 0.02065229 0.0006650341 0.02530889
## 177   177 0.02559716 0.3296332 0.02065332 0.0006677735 0.02523118
## 178   178 0.02559670 0.3296583 0.02065446 0.0006680898 0.02522665
## 179   179 0.02559609 0.3296915 0.02065310 0.0006675795 0.02522787
## 180   180 0.02559466 0.3297647 0.02065185 0.0006677933 0.02521973
## 181   181 0.02559607 0.3297051 0.02065441 0.0006674731 0.02515470
## 182   182 0.02559559 0.3297232 0.02065391 0.0006675500 0.02533029
## 183   183 0.02559959 0.3295398 0.02065807 0.0006676451 0.02517610
## 184   184 0.02559994 0.3295215 0.02065872 0.0006668781 0.02513309
## 185   185 0.02560136 0.3294595 0.02066176 0.0006692109 0.02513318
## 186   186 0.02560137 0.3294610 0.02066181 0.0006709452 0.02526053
## 187   187 0.02560096 0.3294772 0.02066106 0.0006711053 0.02528507
## 188   188 0.02559894 0.3295796 0.02066042 0.0006719924 0.02529631
## 189   189 0.02559909 0.3295844 0.02065835 0.0006724053 0.02529040
## 190   190 0.02559981 0.3295580 0.02065749 0.0006716291 0.02532568
## 191   191 0.02559899 0.3296108 0.02065592 0.0006731505 0.02526121
## 192   192 0.02560005 0.3295552 0.02065794 0.0006695267 0.02513382
## 193   193 0.02560207 0.3294665 0.02065988 0.0006681763 0.02495956
## 194   194 0.02560599 0.3292774 0.02066261 0.0006685867 0.02491479
## 195   195 0.02560670 0.3292523 0.02066373 0.0006687499 0.02492971
## 196   196 0.02560595 0.3292856 0.02066309 0.0006676690 0.02496523
## 197   197 0.02560602 0.3292777 0.02066149 0.0006657192 0.02488256
## 198   198 0.02560746 0.3292136 0.02066236 0.0006656534 0.02484393
## 199   199 0.02560760 0.3292054 0.02066191 0.0006639616 0.02478677
## 200   200 0.02560881 0.3291439 0.02066335 0.0006641473 0.02471569
## 201   201 0.02560876 0.3291476 0.02066360 0.0006640487 0.02465580
## 202   202 0.02560876 0.3291471 0.02066321 0.0006662015 0.02468036
## 203   203 0.02560941 0.3291067 0.02066317 0.0006668886 0.02474915
## 204   204 0.02560935 0.3291026 0.02066225 0.0006685045 0.02473862
## 205   205 0.02560986 0.3290882 0.02066347 0.0006678053 0.02470177
## 206   206 0.02560894 0.3291286 0.02066185 0.0006678967 0.02473858
## 207   207 0.02561070 0.3290360 0.02066410 0.0006681619 0.02472004
## 208   208 0.02561214 0.3289585 0.02066472 0.0006682274 0.02469713
## 209   209 0.02561282 0.3289286 0.02066501 0.0006684388 0.02469809
## 210   210 0.02561388 0.3288822 0.02066661 0.0006672470 0.02465098
## 211   211 0.02561308 0.3289173 0.02066610 0.0006658109 0.02462730
## 212   212 0.02561339 0.3289000 0.02066663 0.0006646152 0.02452479
## 213   213 0.02561525 0.3288095 0.02066855 0.0006642747 0.02446256
## 214   214 0.02561725 0.3287086 0.02066915 0.0006643516 0.02444528
## 215   215 0.02561783 0.3286816 0.02066983 0.0006645588 0.02446005
## 216   216 0.02561705 0.3287205 0.02066930 0.0006647161 0.02447879
## 217   217 0.02561667 0.3287454 0.02066850 0.0006638121 0.02437260
## 218   218 0.02561572 0.3287905 0.02066840 0.0006628240 0.02438716
## 219   219 0.02561600 0.3287732 0.02066878 0.0006625069 0.02434128
## 220   220 0.02561640 0.3287514 0.02066895 0.0006622662 0.02433941
## 221   221 0.02561574 0.3287811 0.02066820 0.0006617399 0.02433212
## 222   222 0.02561545 0.3287937 0.02066737 0.0006614349 0.02433149
## 223   223 0.02561528 0.3288007 0.02066733 0.0006616678 0.02435067
## 224   224 0.02561585 0.3287736 0.02066770 0.0006622896 0.02439444
## 225   225 0.02561581 0.3287775 0.02066781 0.0006622574 0.02439560
## 226   226 0.02561603 0.3287715 0.02066810 0.0006624284 0.02434477
## 227   227 0.02561640 0.3287553 0.02066907 0.0006623749 0.02435461
## 228   228 0.02561718 0.3287171 0.02067009 0.0006621445 0.02431719
## 229   229 0.02561694 0.3287275 0.02067011 0.0006626219 0.02431851
## 230   230 0.02561665 0.3287428 0.02067009 0.0006629669 0.02437113
## 231   231 0.02561627 0.3287603 0.02066974 0.0006625980 0.02440926
## 232   232 0.02561624 0.3287616 0.02066966 0.0006630261 0.02441961
## 233   233 0.02561610 0.3287702 0.02066972 0.0006630654 0.02442058
## 234   234 0.02561591 0.3287804 0.02066947 0.0006631370 0.02442594
## 235   235 0.02561573 0.3287874 0.02066952 0.0006631023 0.02442544
## 236   236 0.02561569 0.3287896 0.02066970 0.0006630408 0.02442450
## 237   237 0.02561555 0.3287963 0.02066948 0.0006630341 0.02442236
## 238   238 0.02561561 0.3287938 0.02066947 0.0006630073 0.02441925
## 239   239 0.02561556 0.3287961 0.02066950 0.0006629952 0.02442089
## 240   240 0.02561559 0.3287947 0.02066959 0.0006629916 0.02442307
##            MAESD
## 1   0.0004505032
## 2   0.0005029019
## 3   0.0005406257
## 4   0.0005359142
## 5   0.0005696292
## 6   0.0006007779
## 7   0.0005954885
## 8   0.0006210426
## 9   0.0006184094
## 10  0.0006296173
## 11  0.0006390197
## 12  0.0006210189
## 13  0.0006286162
## 14  0.0006194143
## 15  0.0005865930
## 16  0.0005951725
## 17  0.0005887401
## 18  0.0005817768
## 19  0.0005936856
## 20  0.0005908454
## 21  0.0005744523
## 22  0.0005481475
## 23  0.0005311006
## 24  0.0005222598
## 25  0.0005169469
## 26  0.0005076054
## 27  0.0005014309
## 28  0.0005017524
## 29  0.0004815462
## 30  0.0004897265
## 31  0.0004913960
## 32  0.0004832518
## 33  0.0004798436
## 34  0.0004812590
## 35  0.0004651545
## 36  0.0004598442
## 37  0.0004588291
## 38  0.0004884003
## 39  0.0004993664
## 40  0.0005055973
## 41  0.0005078835
## 42  0.0005328945
## 43  0.0005291357
## 44  0.0005421590
## 45  0.0005420934
## 46  0.0005355607
## 47  0.0005419661
## 48  0.0005486828
## 49  0.0005499641
## 50  0.0005444212
## 51  0.0005369791
## 52  0.0005492299
## 53  0.0005425073
## 54  0.0005343678
## 55  0.0005464594
## 56  0.0005307724
## 57  0.0005255558
## 58  0.0005135883
## 59  0.0005138371
## 60  0.0005155447
## 61  0.0005141296
## 62  0.0005192869
## 63  0.0005219294
## 64  0.0005288934
## 65  0.0005197166
## 66  0.0005076102
## 67  0.0005067929
## 68  0.0005173013
## 69  0.0005145013
## 70  0.0005183614
## 71  0.0005152033
## 72  0.0005282579
## 73  0.0005306924
## 74  0.0005232826
## 75  0.0005289601
## 76  0.0005226918
## 77  0.0005200548
## 78  0.0005170807
## 79  0.0005053118
## 80  0.0005173992
## 81  0.0005155880
## 82  0.0005017438
## 83  0.0004978109
## 84  0.0004942861
## 85  0.0004930346
## 86  0.0004959138
## 87  0.0004962875
## 88  0.0004923440
## 89  0.0004951221
## 90  0.0005051558
## 91  0.0004987115
## 92  0.0004933783
## 93  0.0005055564
## 94  0.0005067906
## 95  0.0005013384
## 96  0.0005033702
## 97  0.0005093801
## 98  0.0004987890
## 99  0.0004942700
## 100 0.0004982046
## 101 0.0004951950
## 102 0.0005013575
## 103 0.0005047831
## 104 0.0005079371
## 105 0.0005042581
## 106 0.0005006302
## 107 0.0005035957
## 108 0.0005008256
## 109 0.0005014381
## 110 0.0004987366
## 111 0.0005001622
## 112 0.0004965526
## 113 0.0004986553
## 114 0.0004925245
## 115 0.0004877192
## 116 0.0004919504
## 117 0.0004910187
## 118 0.0004879479
## 119 0.0004914374
## 120 0.0004906183
## 121 0.0004889170
## 122 0.0004965666
## 123 0.0004993722
## 124 0.0004876399
## 125 0.0004879605
## 126 0.0004908129
## 127 0.0004791816
## 128 0.0004758174
## 129 0.0004745347
## 130 0.0004807637
## 131 0.0004798211
## 132 0.0004743785
## 133 0.0004657492
## 134 0.0004694845
## 135 0.0004692183
## 136 0.0004741461
## 137 0.0004709643
## 138 0.0004672539
## 139 0.0004616652
## 140 0.0004580521
## 141 0.0004567343
## 142 0.0004599239
## 143 0.0004637004
## 144 0.0004656345
## 145 0.0004638082
## 146 0.0004670471
## 147 0.0004681612
## 148 0.0004734374
## 149 0.0004753229
## 150 0.0004808664
## 151 0.0004834259
## 152 0.0004840002
## 153 0.0004839466
## 154 0.0004799481
## 155 0.0004832131
## 156 0.0004824508
## 157 0.0004815008
## 158 0.0004814415
## 159 0.0004796818
## 160 0.0004745929
## 161 0.0004744576
## 162 0.0004772129
## 163 0.0004824393
## 164 0.0004846023
## 165 0.0004864769
## 166 0.0004827042
## 167 0.0004811560
## 168 0.0004795921
## 169 0.0004803222
## 170 0.0004788101
## 171 0.0004816877
## 172 0.0004777082
## 173 0.0004719412
## 174 0.0004694474
## 175 0.0004700549
## 176 0.0004695117
## 177 0.0004685173
## 178 0.0004675453
## 179 0.0004681852
## 180 0.0004663570
## 181 0.0004658952
## 182 0.0004667593
## 183 0.0004634320
## 184 0.0004624309
## 185 0.0004607057
## 186 0.0004633813
## 187 0.0004658046
## 188 0.0004668368
## 189 0.0004656122
## 190 0.0004663333
## 191 0.0004656834
## 192 0.0004611838
## 193 0.0004587345
## 194 0.0004599118
## 195 0.0004600094
## 196 0.0004588865
## 197 0.0004572037
## 198 0.0004579756
## 199 0.0004577390
## 200 0.0004571317
## 201 0.0004572989
## 202 0.0004594327
## 203 0.0004613038
## 204 0.0004628059
## 205 0.0004610132
## 206 0.0004624653
## 207 0.0004623724
## 208 0.0004618015
## 209 0.0004626457
## 210 0.0004614122
## 211 0.0004599133
## 212 0.0004596942
## 213 0.0004585000
## 214 0.0004587441
## 215 0.0004590452
## 216 0.0004592844
## 217 0.0004582023
## 218 0.0004573929
## 219 0.0004570573
## 220 0.0004571941
## 221 0.0004565113
## 222 0.0004572523
## 223 0.0004576748
## 224 0.0004583151
## 225 0.0004584123
## 226 0.0004584486
## 227 0.0004586430
## 228 0.0004581193
## 229 0.0004587834
## 230 0.0004590040
## 231 0.0004589746
## 232 0.0004591041
## 233 0.0004590426
## 234 0.0004590140
## 235 0.0004591266
## 236 0.0004590646
## 237 0.0004590514
## 238 0.0004589379
## 239 0.0004589351
## 240 0.0004589293
## [1] "Best Model"
##    nvmax
## 13    13

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients of final model:"
##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.980913e+00  1.974679e+00  1.987147e+00
## x4          -6.190952e-05 -7.633876e-05 -4.748029e-05
## x7           1.210509e-02  1.109182e-02  1.311836e-02
## x8           6.003970e-04  3.642962e-04  8.364977e-04
## x9           3.421865e-03  2.896815e-03  3.946916e-03
## x10          1.347223e-03  8.549445e-04  1.839501e-03
## x16          9.846174e-04  6.444391e-04  1.324796e-03
## x17          1.677457e-03  1.155602e-03  2.199311e-03
## x21          1.283488e-04  6.119492e-05  1.955026e-04
## stat4       -6.932958e-04 -1.090276e-03 -2.963155e-04
## stat14      -9.663019e-04 -1.358696e-03 -5.739075e-04
## stat98       3.418433e-03  3.030176e-03  3.806690e-03
## stat110     -3.356195e-03 -3.747807e-03 -2.964582e-03
## sqrt.x18     2.652400e-02  2.502231e-02  2.802568e-02

Test

# Evaluate the caret backward-selection ("leapBackward") model on the
# hold-out test set via the project's test.model helper.
# NOTE(review): `transformation = t` is assumed to reference a transformation
# object assigned earlier in the document, not base::t -- confirm.
if (algo.backward.caret == TRUE) {
  test.model(model.backward, data.test,
             method = 'leapBackward',
             subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.033   2.082   2.094   2.095   2.108   2.145 
## [1] "leapBackward  Test MSE: 0.00104648971088915"

Stepwise Selection (w/ full train)

Train

# Stepwise (both-direction) selection on the full training set: start from
# the null model and search within the scope of the full model.
if (algo.stepwise == TRUE) {
  t1 <- Sys.time()

  model.stepwise <- step(model.null,
                         scope = list(upper = model.full),
                         data = data.train,
                         direction = "both",
                         trace = 0)
  print(summary(model.stepwise))

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins) in the message; a bare
  # `t2 - t1` inside paste() silently drops them, so "2" could mean
  # 2 seconds or 2 minutes.
  print(paste0("Time taken for Stepwise Selection: ", format(t2 - t1)))

  # Residual/diagnostic plots for the selected model (project helper).
  plot.diagnostics(model.stepwise, data.train)
}

Test

# Report hold-out performance for the stepwise model fitted on the full
# training set (test.model is a project helper defined earlier in the file).
if (algo.stepwise == TRUE) {
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Stepwise (both-direction) selection on the filtered training set
# (data.train2 / model.null2 / model.full2 variants).
if (algo.stepwise == TRUE) {
  t1 <- Sys.time()

  model.stepwise2 <- step(model.null2,
                          scope = list(upper = model.full2),
                          data = data.train2,
                          direction = "both",
                          trace = 0)
  print(summary(model.stepwise2))

  t2 <- Sys.time()
  # format() keeps the difftime units; bare subtraction inside paste()
  # drops them and makes the timing message ambiguous.
  print(paste0("Time taken for Stepwise Selection: ", format(t2 - t1)))

  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Report hold-out performance for the stepwise model fitted on the filtered
# training set; note the test set is the same data.test used throughout.
if (algo.stepwise == TRUE) {
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Cross-validated stepwise selection ("leapSeq") through the project's caret
# wrapper. The wrapper returns the final leaps model plus the index (`id`)
# of the best subset size, both unpacked here for the Test section below.
if (algo.stepwise.caret == TRUE) {
  set.seed(1)
  returned = train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapSeq",
    feature.names = feature.names
  )
  model.stepwise = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 8 on full training set
## [1] "All models results"
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03397144 0.1129785 0.02646469 0.0013665043 0.03258230
## 2       2 0.03314757 0.1546431 0.02580411 0.0012740101 0.03550349
## 3       3 0.03253002 0.1853842 0.02520471 0.0011372433 0.03345947
## 4       4 0.03198437 0.2123327 0.02446596 0.0012175414 0.03861630
## 5       5 0.03168250 0.2267382 0.02423192 0.0011860975 0.03411053
## 6       6 0.03158563 0.2313478 0.02414987 0.0011414356 0.03245483
## 7       7 0.03151956 0.2344940 0.02412544 0.0011712320 0.03109860
## 8       8 0.03147259 0.2368539 0.02410171 0.0011684971 0.03058939
## 9       9 0.03150072 0.2354524 0.02413376 0.0011819948 0.03129068
## 10     10 0.03148114 0.2363709 0.02412322 0.0011947180 0.03116077
## 11     11 0.03149740 0.2356756 0.02413578 0.0012131034 0.03187730
## 12     12 0.03148258 0.2364391 0.02413605 0.0012441239 0.03330571
## 13     13 0.03150559 0.2353915 0.02416300 0.0012535464 0.03181309
## 14     14 0.03152527 0.2344496 0.02418075 0.0012406941 0.03226716
## 15     15 0.03157088 0.2322994 0.02420247 0.0012414885 0.03132019
## 16     16 0.03160219 0.2308665 0.02423099 0.0012488655 0.03165040
## 17     17 0.03160186 0.2309332 0.02424001 0.0012463632 0.03189616
## 18     18 0.03159431 0.2313285 0.02424619 0.0012538830 0.03241756
## 19     19 0.03162018 0.2301391 0.02426857 0.0012716976 0.03311663
## 20     20 0.03162195 0.2300332 0.02427172 0.0012578150 0.03327049
## 21     21 0.03163933 0.2293298 0.02428201 0.0012677561 0.03427661
## 22     22 0.03166012 0.2284369 0.02429724 0.0012649398 0.03482626
## 23     23 0.03202129 0.2092281 0.02454510 0.0010055395 0.06041112
## 24     24 0.03198986 0.2115942 0.02456839 0.0016400746 0.06685009
## 25     25 0.03194607 0.2137112 0.02455923 0.0015657523 0.06226366
## 26     26 0.03235518 0.1942789 0.02483540 0.0021335973 0.07192205
## 27     27 0.03207205 0.2072655 0.02464553 0.0013334883 0.06636147
## 28     28 0.03237442 0.1914470 0.02481549 0.0013682747 0.07796521
## 29     29 0.03238620 0.1929211 0.02486411 0.0021177592 0.07093217
## 30     30 0.03231531 0.1946920 0.02486498 0.0016150559 0.07782524
## 31     31 0.03234906 0.1944910 0.02484682 0.0019209672 0.06587868
## 32     32 0.03209988 0.2061575 0.02465723 0.0013160658 0.06531073
## 33     33 0.03174024 0.2251785 0.02434938 0.0012700650 0.03418531
## 34     34 0.03196447 0.2133643 0.02454458 0.0016433081 0.05982646
## 35     35 0.03201415 0.2108510 0.02455907 0.0016299860 0.06476054
## 36     36 0.03201606 0.2115075 0.02461974 0.0016222544 0.05417282
## 37     37 0.03173184 0.2256570 0.02433181 0.0012380114 0.03304570
## 38     38 0.03175403 0.2247120 0.02434040 0.0012245063 0.03279859
## 39     39 0.03212322 0.2053842 0.02464323 0.0012694112 0.06441284
## 40     40 0.03270379 0.1758443 0.02520865 0.0017115115 0.08189468
## 41     41 0.03229253 0.1966465 0.02479776 0.0018654882 0.07808008
## 42     42 0.03256822 0.1813507 0.02509205 0.0016245919 0.08677484
## 43     43 0.03234453 0.1949514 0.02490355 0.0018130886 0.06992059
## 44     44 0.03228191 0.1972696 0.02477331 0.0018867830 0.07846407
## 45     45 0.03273590 0.1742831 0.02520864 0.0016585721 0.08311510
## 46     46 0.03174615 0.2253463 0.02434959 0.0012088292 0.03229797
## 47     47 0.03173511 0.2258958 0.02434447 0.0012040875 0.03243150
## 48     48 0.03208226 0.2074473 0.02462154 0.0012793084 0.06429762
## 49     49 0.03208987 0.2069998 0.02457693 0.0009340056 0.05739682
## 50     50 0.03257720 0.1828130 0.02512719 0.0020361858 0.08307310
## 51     51 0.03264974 0.1794417 0.02508705 0.0021606223 0.08361111
## 52     52 0.03227353 0.1979421 0.02480931 0.0018363301 0.07712854
## 53     53 0.03204745 0.2105357 0.02465335 0.0016015778 0.05356918
## 54     54 0.03208363 0.2094521 0.02460830 0.0019892371 0.05478693
## 55     55 0.03175653 0.2250892 0.02436104 0.0012192166 0.03307973
## 56     56 0.03197580 0.2133971 0.02454075 0.0016231522 0.06006953
## 57     57 0.03250467 0.1872345 0.02494131 0.0016420454 0.06947366
## 58     58 0.03176642 0.2247808 0.02436947 0.0012372651 0.03272895
## 59     59 0.03209724 0.2077980 0.02461937 0.0016084244 0.06406158
## 60     60 0.03225440 0.1988411 0.02482633 0.0017562295 0.07654178
## 61     61 0.03178743 0.2238745 0.02437682 0.0012453426 0.03398422
## 62     62 0.03209370 0.2080532 0.02465213 0.0015935129 0.06317445
## 63     63 0.03180442 0.2231940 0.02439513 0.0012477128 0.03429233
## 64     64 0.03239497 0.1924571 0.02488990 0.0018253300 0.07973139
## 65     65 0.03231743 0.1961741 0.02481973 0.0018791952 0.07828043
## 66     66 0.03235213 0.1943228 0.02485517 0.0017850910 0.07690053
## 67     67 0.03218034 0.2034527 0.02463957 0.0009445690 0.05801577
## 68     68 0.03211121 0.2080320 0.02470146 0.0016315130 0.05419764
## 69     69 0.03217036 0.2041151 0.02471240 0.0013512624 0.06623154
## 70     70 0.03307507 0.1587109 0.02544868 0.0023038657 0.08915736
## 71     71 0.03220237 0.2017189 0.02477215 0.0017861025 0.07272222
## 72     72 0.03245716 0.1895430 0.02493487 0.0017464019 0.07837400
## 73     73 0.03205587 0.2103592 0.02460654 0.0016347342 0.05972933
## 74     74 0.03246820 0.1890805 0.02493744 0.0017342174 0.07799535
## 75     75 0.03187467 0.2204506 0.02443069 0.0012582039 0.03443531
## 76     76 0.03287227 0.1677752 0.02519389 0.0011427701 0.08535063
## 77     77 0.03248156 0.1883004 0.02495667 0.0015847839 0.07990003
## 78     78 0.03253839 0.1857136 0.02492601 0.0012400170 0.07385677
## 79     79 0.03206362 0.2100981 0.02461187 0.0015056010 0.05706150
## 80     80 0.03288787 0.1662779 0.02531949 0.0018992997 0.09521491
## 81     81 0.03231318 0.1981335 0.02471890 0.0009567826 0.05899835
## 82     82 0.03193266 0.2181648 0.02447590 0.0013172185 0.03536867
## 83     83 0.03252735 0.1883156 0.02496846 0.0022076119 0.07298247
## 84     84 0.03192813 0.2183703 0.02447047 0.0013018534 0.03579461
## 85     85 0.03228105 0.2014714 0.02473703 0.0020454424 0.05635946
## 86     86 0.03226739 0.2003566 0.02475421 0.0013816267 0.06572313
## 87     87 0.03320490 0.1526152 0.02553157 0.0021401254 0.08924897
## 88     88 0.03226708 0.2004426 0.02474622 0.0013945213 0.06560004
## 89     89 0.03249043 0.1882265 0.02487844 0.0011658231 0.07025245
## 90     90 0.03193229 0.2182222 0.02446204 0.0013184467 0.03459768
## 91     91 0.03222151 0.2038259 0.02475144 0.0016438930 0.05270147
## 92     92 0.03235592 0.1977022 0.02479393 0.0016889704 0.04987106
## 93     93 0.03243507 0.1916355 0.02492059 0.0018540200 0.07537843
## 94     94 0.03231061 0.2003363 0.02477687 0.0020229483 0.05553980
## 95     95 0.03231055 0.2003194 0.02477819 0.0020251898 0.05565580
## 96     96 0.03264572 0.1825594 0.02503708 0.0018758163 0.06744941
## 97     97 0.03198619 0.2158996 0.02451695 0.0012900049 0.03468594
## 98     98 0.03236545 0.1960874 0.02475308 0.0009475952 0.05806822
## 99     99 0.03198940 0.2158217 0.02452306 0.0012977946 0.03506144
## 100   100 0.03200050 0.2153558 0.02452841 0.0013002071 0.03489076
## 101   101 0.03226867 0.2018089 0.02480616 0.0016261819 0.05300986
## 102   102 0.03276983 0.1743701 0.02521366 0.0018600097 0.08664441
## 103   103 0.03240496 0.1956550 0.02483861 0.0016783260 0.04966427
## 104   104 0.03253412 0.1870516 0.02499378 0.0017762151 0.07541911
## 105   105 0.03260932 0.1849845 0.02504207 0.0022066133 0.07304406
## 106   106 0.03257383 0.1849452 0.02494858 0.0011619634 0.07099751
## 107   107 0.03230001 0.1998808 0.02477270 0.0016032122 0.06205304
## 108   108 0.03220124 0.2045308 0.02470909 0.0015111248 0.05727569
## 109   109 0.03321957 0.1524036 0.02554005 0.0018342737 0.08417204
## 110   110 0.03237658 0.1962628 0.02483302 0.0015143903 0.06209759
## 111   111 0.03230562 0.1996510 0.02477668 0.0015948260 0.06188356
## 112   112 0.03239692 0.1967811 0.02484798 0.0020323597 0.05609423
## 113   113 0.03277093 0.1772153 0.02508138 0.0017489574 0.06772852
## 114   114 0.03220959 0.2041436 0.02471735 0.0014975866 0.05672452
## 115   115 0.03237076 0.1965815 0.02483636 0.0015136889 0.06225739
## 116   116 0.03244835 0.1939787 0.02488487 0.0016892690 0.05082620
## 117   117 0.03276316 0.1763753 0.02521713 0.0023248785 0.08005867
## 118   118 0.03304516 0.1633533 0.02537148 0.0018885459 0.07081310
## 119   119 0.03237697 0.1964042 0.02484704 0.0015040425 0.06220255
## 120   120 0.03254265 0.1886842 0.02502784 0.0017338566 0.05454720
## 121   121 0.03205301 0.2132839 0.02457469 0.0012825409 0.03536407
## 122   122 0.03288902 0.1693972 0.02527432 0.0007554946 0.06841954
## 123   123 0.03247883 0.1913643 0.02497377 0.0016947886 0.06231732
## 124   124 0.03229421 0.2019332 0.02477698 0.0017774589 0.04501978
## 125   125 0.03250656 0.1888725 0.02498241 0.0014133530 0.06662260
## 126   126 0.03278665 0.1759732 0.02520768 0.0018577898 0.07109759
## 127   127 0.03244749 0.1925979 0.02497618 0.0016384742 0.05809510
## 128   128 0.03233781 0.1983816 0.02483085 0.0013152260 0.05860200
## 129   129 0.03221223 0.2048110 0.02473287 0.0014892968 0.05405877
## 130   130 0.03205692 0.2132326 0.02457797 0.0013107628 0.03601517
## 131   131 0.03251671 0.1903708 0.02495851 0.0019305730 0.06068296
## 132   132 0.03260955 0.1831650 0.02497608 0.0008389741 0.05342569
## 133   133 0.03247551 0.1923141 0.02488395 0.0013071346 0.04075348
## 134   134 0.03226833 0.2028029 0.02478500 0.0014602805 0.04395547
## 135   135 0.03219347 0.2060868 0.02470843 0.0013933203 0.04781663
## 136   136 0.03230586 0.2004229 0.02473645 0.0009120580 0.03959132
## 137   137 0.03261033 0.1859116 0.02499490 0.0010985729 0.03610726
## 138   138 0.03239710 0.1953640 0.02490869 0.0014786819 0.05466876
## 139   139 0.03241051 0.1945547 0.02486989 0.0014122543 0.05336783
## 140   140 0.03246156 0.1928194 0.02489076 0.0015351229 0.04910410
## 141   141 0.03210345 0.2113264 0.02459051 0.0012901467 0.03539044
## 142   142 0.03209468 0.2117068 0.02458410 0.0012890430 0.03542907
## 143   143 0.03209306 0.2118097 0.02458615 0.0012888990 0.03537949
## 144   144 0.03264235 0.1836894 0.02508503 0.0016926078 0.06784644
## 145   145 0.03209987 0.2114964 0.02459350 0.0012883969 0.03530163
## 146   146 0.03221771 0.2051766 0.02472050 0.0014144940 0.04827136
## 147   147 0.03230843 0.2011415 0.02481348 0.0015057462 0.04615539
## 148   148 0.03244573 0.1935181 0.02492637 0.0015677698 0.05297558
## 149   149 0.03268819 0.1815586 0.02505961 0.0013053548 0.06332992
## 150   150 0.03241621 0.1954602 0.02486084 0.0017021701 0.04829144
## 151   151 0.03222071 0.2049371 0.02473040 0.0014654563 0.04764652
## 152   152 0.03256781 0.1883220 0.02498878 0.0015793498 0.04662002
## 153   153 0.03265027 0.1837606 0.02506714 0.0014218024 0.05063192
## 154   154 0.03211957 0.2107988 0.02460882 0.0012909070 0.03581454
## 155   155 0.03244164 0.1955739 0.02488679 0.0015175916 0.03939648
## 156   156 0.03268234 0.1823677 0.02511379 0.0016678176 0.06992850
## 157   157 0.03211918 0.2108452 0.02460557 0.0012891136 0.03598560
## 158   158 0.03275655 0.1782427 0.02516307 0.0015383661 0.05441021
## 159   159 0.03273843 0.1797970 0.02513816 0.0014542014 0.04491325
## 160   160 0.03225011 0.2040344 0.02473957 0.0014334333 0.05023636
## 161   161 0.03230911 0.2003280 0.02474913 0.0012242145 0.04886286
## 162   162 0.03224518 0.2036932 0.02470367 0.0013839072 0.04658853
## 163   163 0.03211262 0.2111419 0.02459607 0.0012794154 0.03595391
## 164   164 0.03230973 0.2018578 0.02476906 0.0016500509 0.04063673
## 165   165 0.03232309 0.2002896 0.02479104 0.0013593479 0.04908749
## 166   166 0.03244193 0.1956394 0.02488297 0.0015027212 0.03928401
## 167   167 0.03266581 0.1815094 0.02504729 0.0010015585 0.05694172
## 168   168 0.03211718 0.2110009 0.02460095 0.0012754353 0.03604113
## 169   169 0.03254109 0.1895816 0.02502488 0.0015382047 0.05487598
## 170   170 0.03224553 0.2041129 0.02475205 0.0014790924 0.04930833
## 171   171 0.03211530 0.2111079 0.02460004 0.0012802019 0.03631876
## 172   172 0.03239138 0.1967750 0.02490465 0.0016120237 0.06028783
## 173   173 0.03287351 0.1726804 0.02521433 0.0017922218 0.06540295
## 174   174 0.03259006 0.1874446 0.02495717 0.0012192039 0.05715127
## 175   175 0.03212893 0.2105458 0.02461061 0.0012838308 0.03622762
## 176   176 0.03246556 0.1941241 0.02492835 0.0017792750 0.05341644
## 177   177 0.03258564 0.1885222 0.02502431 0.0017258356 0.06226291
## 178   178 0.03226607 0.2036260 0.02474555 0.0014547414 0.05140301
## 179   179 0.03267297 0.1848605 0.02506156 0.0017092037 0.05618199
## 180   180 0.03254964 0.1885420 0.02497235 0.0009591275 0.04920725
## 181   181 0.03232876 0.2002211 0.02480985 0.0013656055 0.04955832
## 182   182 0.03211694 0.2110792 0.02460413 0.0012806493 0.03623879
## 183   183 0.03211552 0.2111700 0.02460620 0.0012843004 0.03639933
## 184   184 0.03253437 0.1905530 0.02499700 0.0017123529 0.05133939
## 185   185 0.03211282 0.2112932 0.02460637 0.0012839479 0.03665671
## 186   186 0.03250350 0.1913493 0.02493935 0.0015992629 0.05078393
## 187   187 0.03210792 0.2115001 0.02460412 0.0012783199 0.03662367
## 188   188 0.03300576 0.1666651 0.02544174 0.0019681049 0.07772468
## 189   189 0.03210705 0.2115624 0.02460263 0.0012779354 0.03672165
## 190   190 0.03235195 0.1997052 0.02484509 0.0015446309 0.04913405
## 191   191 0.03224723 0.2044631 0.02474650 0.0014495026 0.05193970
## 192   192 0.03210955 0.2114540 0.02460463 0.0012756480 0.03643071
## 193   193 0.03232010 0.2015366 0.02480241 0.0016640620 0.04121148
## 194   194 0.03211473 0.2112343 0.02461162 0.0012745219 0.03640338
## 195   195 0.03225599 0.2041548 0.02475948 0.0014546871 0.05196438
## 196   196 0.03236215 0.1997972 0.02481621 0.0015859561 0.05816354
## 197   197 0.03211448 0.2112530 0.02461544 0.0012739834 0.03644215
## 198   198 0.03253629 0.1886883 0.02493416 0.0007592510 0.05029918
## 199   199 0.03211251 0.2113550 0.02461515 0.0012759686 0.03653119
## 200   200 0.03232869 0.1998765 0.02477976 0.0008953538 0.04007804
## 201   201 0.03225214 0.2037004 0.02472376 0.0013816821 0.04705473
## 202   202 0.03232527 0.1999369 0.02477631 0.0012301931 0.05115154
## 203   203 0.03211040 0.2114433 0.02461549 0.0012761489 0.03650638
## 204   204 0.03225640 0.2038954 0.02476694 0.0015080163 0.05099894
## 205   205 0.03210932 0.2114865 0.02461411 0.0012768122 0.03654330
## 206   206 0.03210991 0.2114623 0.02461523 0.0012762353 0.03649789
## 207   207 0.03225571 0.2042270 0.02476314 0.0014616565 0.05218804
## 208   208 0.03225701 0.2041784 0.02476149 0.0014577455 0.05196430
## 209   209 0.03211434 0.2112763 0.02461896 0.0012756960 0.03639769
## 210   210 0.03243700 0.1961051 0.02488990 0.0015087880 0.03999484
## 211   211 0.03235909 0.1994863 0.02485391 0.0015461169 0.04905449
## 212   212 0.03211224 0.2113639 0.02461477 0.0012736690 0.03630954
## 213   213 0.03211188 0.2113791 0.02461543 0.0012729456 0.03635200
## 214   214 0.03211207 0.2113665 0.02461610 0.0012748892 0.03639020
## 215   215 0.03243504 0.1961961 0.02488722 0.0015117841 0.04007307
## 216   216 0.03211171 0.2113777 0.02461499 0.0012739770 0.03630878
## 217   217 0.03248859 0.1917339 0.02494680 0.0014725706 0.06189575
## 218   218 0.03226635 0.2031277 0.02473286 0.0013994964 0.04812753
## 219   219 0.03211237 0.2113544 0.02461522 0.0012740549 0.03631990
## 220   220 0.03267644 0.1842191 0.02510987 0.0015540605 0.05040123
## 221   221 0.03231282 0.2019066 0.02481001 0.0016496036 0.04069667
## 222   222 0.03235591 0.1993033 0.02483396 0.0013946186 0.05176316
## 223   223 0.03211577 0.2112041 0.02461773 0.0012756530 0.03636885
## 224   224 0.03211624 0.2111851 0.02461784 0.0012752894 0.03637780
## 225   225 0.03243232 0.1950009 0.02489761 0.0016270447 0.05958146
## 226   226 0.03252646 0.1920613 0.02498058 0.0017408356 0.06849885
## 227   227 0.03228135 0.2030151 0.02478470 0.0015450660 0.05239154
## 228   228 0.03227102 0.2037014 0.02477322 0.0014704036 0.05247196
## 229   229 0.03211846 0.2110916 0.02461944 0.0012747558 0.03633198
## 230   230 0.03211878 0.2110758 0.02461948 0.0012746335 0.03632161
## 231   231 0.03234259 0.1993300 0.02478791 0.0008899624 0.04019697
## 232   232 0.03238284 0.1991864 0.02483971 0.0016202031 0.05920517
## 233   233 0.03255830 0.1906761 0.02500844 0.0018317460 0.06931301
## 234   234 0.03211810 0.2111045 0.02461931 0.0012757210 0.03632636
## 235   235 0.03228450 0.2025120 0.02475180 0.0014182596 0.04895250
## 236   236 0.03268906 0.1837651 0.02513208 0.0015681018 0.05087281
## 237   237 0.03274040 0.1813898 0.02521066 0.0020118462 0.06015262
## 238   238 0.03252017 0.1904034 0.02494147 0.0013613988 0.06043976
## 239   239 0.03290863 0.1734272 0.02532189 0.0020831177 0.07148409
## 240   240 0.03211761 0.2111276 0.02461892 0.0012761608 0.03635025
##            MAESD
## 1   0.0007738764
## 2   0.0006800497
## 3   0.0006076308
## 4   0.0006490686
## 5   0.0006124246
## 6   0.0005400859
## 7   0.0005590591
## 8   0.0005646895
## 9   0.0005887279
## 10  0.0006101115
## 11  0.0006157131
## 12  0.0006423545
## 13  0.0006439677
## 14  0.0006360397
## 15  0.0006247845
## 16  0.0006440486
## 17  0.0006440588
## 18  0.0006636083
## 19  0.0006770111
## 20  0.0006540925
## 21  0.0006563192
## 22  0.0006630873
## 23  0.0007373718
## 24  0.0009537927
## 25  0.0010373104
## 26  0.0014456028
## 27  0.0008145995
## 28  0.0009377453
## 29  0.0014252835
## 30  0.0010648507
## 31  0.0012976757
## 32  0.0008137084
## 33  0.0006535130
## 34  0.0010365453
## 35  0.0009181366
## 36  0.0011399535
## 37  0.0006136115
## 38  0.0006039519
## 39  0.0007897916
## 40  0.0013121910
## 41  0.0011838380
## 42  0.0012299622
## 43  0.0013580263
## 44  0.0012204730
## 45  0.0012949221
## 46  0.0006256025
## 47  0.0006106359
## 48  0.0008159814
## 49  0.0006632204
## 50  0.0015383582
## 51  0.0015045910
## 52  0.0013068942
## 53  0.0011499888
## 54  0.0012922444
## 55  0.0006384228
## 56  0.0010577243
## 57  0.0011115126
## 58  0.0006504522
## 59  0.0009449109
## 60  0.0012851766
## 61  0.0006539177
## 62  0.0010891755
## 63  0.0006509167
## 64  0.0012061101
## 65  0.0011864111
## 66  0.0012577303
## 67  0.0006606812
## 68  0.0011254580
## 69  0.0008683038
## 70  0.0016130960
## 71  0.0012287366
## 72  0.0011681224
## 73  0.0010442801
## 74  0.0011637366
## 75  0.0006429765
## 76  0.0008881597
## 77  0.0011450005
## 78  0.0010063637
## 79  0.0009594859
## 80  0.0013074725
## 81  0.0006879370
## 82  0.0006875623
## 83  0.0015285659
## 84  0.0006670910
## 85  0.0013393067
## 86  0.0008963540
## 87  0.0015145927
## 88  0.0009113945
## 89  0.0009201013
## 90  0.0006666507
## 91  0.0010966445
## 92  0.0010182038
## 93  0.0012673648
## 94  0.0013274524
## 95  0.0013221151
## 96  0.0011550018
## 97  0.0006374032
## 98  0.0006472460
## 99  0.0006453306
## 100 0.0006410990
## 101 0.0010731790
## 102 0.0013868574
## 103 0.0010178731
## 104 0.0012590385
## 105 0.0015192774
## 106 0.0009058128
## 107 0.0009197108
## 108 0.0009468351
## 109 0.0012331924
## 110 0.0010300489
## 111 0.0009200092
## 112 0.0013187940
## 113 0.0012736618
## 114 0.0009274848
## 115 0.0010283982
## 116 0.0010392038
## 117 0.0016286457
## 118 0.0014389301
## 119 0.0010192462
## 120 0.0011499447
## 121 0.0006257165
## 122 0.0008270520
## 123 0.0011515898
## 124 0.0011043546
## 125 0.0008833207
## 126 0.0012739788
## 127 0.0010775451
## 128 0.0008139400
## 129 0.0009309224
## 130 0.0006435399
## 131 0.0012640249
## 132 0.0004726736
## 133 0.0008583331
## 134 0.0008901951
## 135 0.0008313284
## 136 0.0005086458
## 137 0.0007360376
## 138 0.0009721282
## 139 0.0008335056
## 140 0.0009101020
## 141 0.0006169933
## 142 0.0006187930
## 143 0.0006221491
## 144 0.0011533964
## 145 0.0006187074
## 146 0.0008463235
## 147 0.0009276090
## 148 0.0009528792
## 149 0.0009475718
## 150 0.0010026282
## 151 0.0008677980
## 152 0.0009356718
## 153 0.0010216885
## 154 0.0006256766
## 155 0.0009102624
## 156 0.0011615835
## 157 0.0006301424
## 158 0.0009925667
## 159 0.0010297840
## 160 0.0008869609
## 161 0.0006313200
## 162 0.0007078418
## 163 0.0006272397
## 164 0.0009961206
## 165 0.0008511297
## 166 0.0009207739
## 167 0.0007215942
## 168 0.0006194572
## 169 0.0010526748
## 170 0.0009066164
## 171 0.0006265944
## 172 0.0010997995
## 173 0.0011491782
## 174 0.0008352811
## 175 0.0006240930
## 176 0.0011751240
## 177 0.0011407119
## 178 0.0008938584
## 179 0.0011108943
## 180 0.0007450172
## 181 0.0008721553
## 182 0.0006200659
## 183 0.0006230323
## 184 0.0011588694
## 185 0.0006208586
## 186 0.0009996519
## 187 0.0006176807
## 188 0.0013448425
## 189 0.0006161652
## 190 0.0009555209
## 191 0.0008984352
## 192 0.0006135112
## 193 0.0010293378
## 194 0.0006140730
## 195 0.0009018254
## 196 0.0009920653
## 197 0.0006152132
## 198 0.0004707257
## 199 0.0006112505
## 200 0.0005018666
## 201 0.0006797135
## 202 0.0006410603
## 203 0.0006118951
## 204 0.0009022122
## 205 0.0006134732
## 206 0.0006128079
## 207 0.0009060516
## 208 0.0008973706
## 209 0.0006120307
## 210 0.0009043541
## 211 0.0009377055
## 212 0.0006093801
## 213 0.0006084648
## 214 0.0006088704
## 215 0.0009071547
## 216 0.0006093406
## 217 0.0009274305
## 218 0.0006974602
## 219 0.0006090568
## 220 0.0010620469
## 221 0.0010328612
## 222 0.0008923074
## 223 0.0006102335
## 224 0.0006098857
## 225 0.0009608288
## 226 0.0011842344
## 227 0.0009344626
## 228 0.0009136694
## 229 0.0006076066
## 230 0.0006072950
## 231 0.0004996468
## 232 0.0010334983
## 233 0.0012270055
## 234 0.0006080520
## 235 0.0007207015
## 236 0.0010990462
## 237 0.0013374217
## 238 0.0007565107
## 239 0.0013983020
## 240 0.0006084272
## [1] "Best Model"
##   nvmax
## 8     8

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients of final model:"
##                  Estimate         2.5 %        97.5 %
## (Intercept)  2.000745e+00  1.9944123678  2.007078e+00
## x4          -5.339736e-05 -0.0000708296 -3.596511e-05
## x7           1.126502e-02  0.0100390043  1.249104e-02
## x9           3.419457e-03  0.0027827248  4.056190e-03
## x16          8.893311e-04  0.0004770930  1.301569e-03
## x17          1.569187e-03  0.0009371998  2.201173e-03
## stat98       3.679003e-03  0.0032090828  4.148922e-03
## stat110     -3.396195e-03 -0.0038709613 -2.921428e-03
## sqrt.x18     2.634127e-02  0.0245190385  2.816351e-02

Test

# Score the CV-selected stepwise ("leapSeq") model on the hold-out set.
# NOTE(review): `transformation = t` presumably refers to a transformation
# object set earlier in the document, not base::t -- confirm.
if (algo.stepwise.caret == TRUE) {
  test.model(model.stepwise, data.test,
             method = 'leapSeq',
             subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.042   2.086   2.098   2.098   2.111   2.145 
## [1] "leapSeq  Test MSE: 0.00104706068550194"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

# LASSO on the full training set via glmnet.
# glmnet requires a numeric matrix, so columns are coerced explicitly;
# model.matrix would work as well (and would expand factors to dummies
# and interactions).
if (algo.LASSO == TRUE) {
  x <- as.matrix(data.train[, feature.names])
  y <- data.train[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Decreasing lambda grid from 1e10 down to 1e-2 (100 values).
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 selects the LASSO penalty; cross-validation picks lambda.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter; could also be chosen visually from the plot.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

# Hold-out evaluation of the full-train LASSO fit at the CV-chosen lambda.
if (algo.LASSO == TRUE) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the quantity computed above is a mean squared error (see the
  # variable name and the "Test MSE" wording printed by the other
  # algorithms), but the old message mislabelled it "RMSE".
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  # Quick calibration scatter: observed vs predicted.
  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

# LASSO on the filtered training set (data.train2) via glmnet; the test
# matrices are rebuilt from the common data.test.
# glmnet requires a numeric matrix, so columns are coerced explicitly;
# model.matrix would work as well (and would expand factors/interactions).
if (algo.LASSO == TRUE) {
  x <- as.matrix(data.train2[, feature.names])
  y <- data.train2[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Decreasing lambda grid from 1e10 down to 1e-2 (100 values).
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 selects the LASSO penalty; cross-validation picks lambda.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter; could also be chosen visually from the plot.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

# Hold-out evaluation of the filtered-train LASSO fit at the CV-chosen
# lambda. Note this reuses the x/y/model globals overwritten by the
# preceding "LASSO (w/ filtered train)" Train chunk.
if (algo.LASSO == TRUE) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the value is a mean squared error (mean of squared residuals),
  # but the old message mislabelled it "RMSE".
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

# Cross-validated LASSO via the project's caret wrapper (method "glmnet";
# the wrapper fixes alpha = 1 in its tuning grid when subopt == "LASSO").
# BUG FIX: removed a leftover debugonce(train.caret.glmselect) call that
# dropped the knitting session into the interactive debugger (the
# "debugging in:" trace in the rendered output confirms it fired).
if (algo.LASSO.caret == TRUE) {
  set.seed(1)
  returned = train.caret.glmselect(formula = formula,
                                   data = data.train,
                                   method = "glmnet",
                                   subopt = 'LASSO',
                                   feature.names = feature.names)
  model.LASSO.caret = returned$model
}
## debugging in: train.caret.glmselect(formula = formula, data = data.train, method = "glmnet", 
##     subopt = "LASSO", feature.names = feature.names)
## debug at <text>#75: {
##     if (is.null(train.control)) {
##         train.control <- trainControl(method = "cv", number = 10, 
##             seeds = setCaretSeeds(method = "cv", numbers = 10, 
##                 seed = 1701), search = "grid", verboseIter = TRUE, 
##             allowParallel = TRUE)
##     }
##     if (is.null(tune.grid)) {
##         if (method == "leapForward" | method == "leapBackward" | 
##             method == "leapSeq") {
##             tune.grid = data.frame(nvmax = 1:length(feature.names))
##         }
##         if (method == "glmnet" && subopt == "LASSO") {
##             lambda = 10^seq(-2, 0, length = 100)
##             alpha = c(1)
##             tune.grid = expand.grid(alpha = alpha, lambda = lambda)
##         }
##         if (method == "lars") {
##             fraction = seq(0, 1, length = 100)
##             tune.grid = expand.grid(fraction = fraction)
##             pre.proc = c("center", "scale")
##         }
##     }
##     cl <- makeCluster(detectCores() * 0.75)
##     registerDoParallel(cl)
##     set.seed(1)
##     model.caret <- caret::train(formula, data = data, method = method, 
##         tuneGrid = tune.grid, trControl = train.control, preProc = pre.proc)
##     stopCluster(cl)
##     registerDoSEQ()
##     if (method == "leapForward" | method == "leapBackward" | 
##         method == "leapSeq") {
##         print("All models results")
##         print(model.caret$results)
##         print("Best Model")
##         print(model.caret$bestTune)
##         model = model.caret$finalModel
##         dataPlot = model.caret$results %>% gather(key = "metric", 
##             value = "value", -nvmax) %>% dplyr::filter(metric %in% 
##             c("MAE", "RMSE", "Rsquared"))
##         metricsPlot = ggplot(data = dataPlot, aes(x = nvmax, 
##             y = value)) + geom_line(color = "lightblue4") + geom_point(color = "blue", 
##             alpha = 0.7, size = 0.9) + facet_wrap(~metric, ncol = 4, 
##             scales = "free_y") + theme_light()
##         plot(metricsPlot)
##         dataPlot = data.frame(pred = predict(model.caret, data), 
##             res = resid(model.caret))
##         residPlot = ggplot(dataPlot, aes(x = pred, y = res)) + 
##             geom_point(color = "light blue", alpha = 0.7) + geom_smooth() + 
##             theme_light()
##         plot(residPlot)
##         residHistogram = ggplot(dataPlot, aes(x = res)) + geom_histogram(aes(y = ..density..), 
##             fill = "light blue", alpha = 1) + geom_density(color = "lightblue4") + 
##             theme_light()
##         plot(residHistogram)
##         id = rownames(model.caret$bestTune)
##         print("Coefficients of final model:")
##         coefs <- coef(model, id = id)
##         nams <- names(coefs)
##         nams <- nams[!nams %in% "(Intercept)"]
##         response <- as.character(formula[[2]])
##         form <- as.formula(paste(response, paste(nams, collapse = " + "), 
##             sep = " ~ "))
##         mod <- lm(form, data = data)
##         print(car::Confint(mod))
##         return(list(model = model, id = id, residPlot = residPlot, 
##             residHistogram = residHistogram, modelLM = mod))
##     }
##     if (method == "glmnet" && subopt == "LASSO") {
##         print(model.caret)
##         print(plot(model.caret))
##         print(model.caret$bestTune)
##         print(model.caret$results)
##         model = model.caret$finalModel
##         dataPlot = model.caret$results %>% gather(key = "metric", 
##             value = "value", -lambda) %>% dplyr::filter(metric %in% 
##             c("MAE", "RMSE", "Rsquared"))
##         metricsPlot = ggplot(data = dataPlot, aes(x = lambda, 
##             y = value)) + geom_line(color = "lightblue4") + geom_point(color = "blue", 
##             alpha = 0.7, size = 0.9) + facet_wrap(~metric, ncol = 4, 
##             scales = "free_y") + theme_light()
##         plot(metricsPlot)
##         dataPlot = data.frame(pred = predict(model.caret, data), 
##             res = resid(model.caret))
##         residPlot = ggplot(dataPlot, aes(x = pred, y = res)) + 
##             geom_point(color = "light blue", alpha = 0.7) + geom_smooth() + 
##             theme_light()
##         plot(residPlot)
##         residHistogram = ggplot(dataPlot, aes(x = res)) + geom_histogram(aes(y = ..density..), 
##             fill = "light blue", alpha = 1) + geom_density(color = "lightblue4") + 
##             theme_light()
##         plot(residHistogram)
##         print("Coefficients")
##         t = coef(model, s = model.caret$bestTune$lambda)
##         model.coef = t[which(t[, 1] != 0), ]
##         print(as.data.frame(model.coef))
##         id = NULL
##         return(list(model = model.caret, id = id, residPlot = residPlot, 
##             metricsPlot = metricsPlot))
##     }
##     if (method == "lars") {
##         print(model.caret)
##         print(plot(model.caret))
##         print(model.caret$bestTune)
##         dataPlot = model.caret$results %>% gather(key = "metric", 
##             value = "value", -fraction) %>% dplyr::filter(metric %in% 
##             c("MAE", "RMSE", "Rsquared"))
##         metricsPlot = ggplot(data = dataPlot, aes(x = fraction, 
##             y = value)) + geom_line(color = "lightblue4") + geom_point(color = "blue", 
##             alpha = 0.7, size = 0.9) + facet_wrap(~metric, ncol = 4, 
##             scales = "free_y") + theme_light()
##         plot(metricsPlot)
##         dataPlot = data.frame(pred = predict(model.caret, data), 
##             res = resid(model.caret))
##         residPlot = ggplot(dataPlot, aes(x = pred, y = res)) + 
##             geom_point(color = "light blue", alpha = 0.7) + geom_smooth() + 
##             theme_light()
##         plot(residPlot)
##         residHistogram = ggplot(dataPlot, aes(x = res)) + geom_histogram(aes(y = ..density..), 
##             fill = "light blue", alpha = 1) + geom_density(color = "lightblue4") + 
##             theme_light()
##         plot(residHistogram)
##         print("Coefficients")
##         t = coef(model.caret$finalModel, s = model.caret$bestTune$fraction, 
##             mode = "fraction")
##         model.coef = t[which(t != 0)]
##         print(model.coef)
##         id = NULL
##         return(list(model = model.caret, id = id, residPlot = residPlot, 
##             residHistogram = residHistogram))
##     }
## }
## debug at <text>#79: if (is.null(train.control)) {
##     train.control <- trainControl(method = "cv", number = 10, 
##         seeds = setCaretSeeds(method = "cv", numbers = 10, seed = 1701), 
##         search = "grid", verboseIter = TRUE, allowParallel = TRUE)
## }
## debug at <text>#80: train.control <- trainControl(method = "cv", number = 10, seeds = setCaretSeeds(method = "cv", 
##     numbers = 10, seed = 1701), search = "grid", verboseIter = TRUE, 
##     allowParallel = TRUE)
## debug at <text>#91: if (is.null(tune.grid)) {
##     if (method == "leapForward" | method == "leapBackward" | 
##         method == "leapSeq") {
##         tune.grid = data.frame(nvmax = 1:length(feature.names))
##     }
##     if (method == "glmnet" && subopt == "LASSO") {
##         lambda = 10^seq(-2, 0, length = 100)
##         alpha = c(1)
##         tune.grid = expand.grid(alpha = alpha, lambda = lambda)
##     }
##     if (method == "lars") {
##         fraction = seq(0, 1, length = 100)
##         tune.grid = expand.grid(fraction = fraction)
##         pre.proc = c("center", "scale")
##     }
## }
## debug at <text>#92: if (method == "leapForward" | method == "leapBackward" | method == 
##     "leapSeq") {
##     tune.grid = data.frame(nvmax = 1:length(feature.names))
## }
## debug at <text>#95: if (method == "glmnet" && subopt == "LASSO") {
##     lambda = 10^seq(-2, 0, length = 100)
##     alpha = c(1)
##     tune.grid = expand.grid(alpha = alpha, lambda = lambda)
## }
## debug at <text>#99: lambda = 10^seq(-2, 0, length = 100)
## debug at <text>#100: alpha = c(1)
## debug at <text>#101: tune.grid = expand.grid(alpha = alpha, lambda = lambda)
## debug at <text>#103: if (method == "lars") {
##     fraction = seq(0, 1, length = 100)
##     tune.grid = expand.grid(fraction = fraction)
##     pre.proc = c("center", "scale")
## }
## debug at <text>#112: cl <- makeCluster(detectCores() * 0.75)
## debug at <text>#113: registerDoParallel(cl)
## debug at <text>#115: set.seed(1)
## debug at <text>#118: model.caret <- caret::train(formula, data = data, method = method, 
##     tuneGrid = tune.grid, trControl = train.control, preProc = pre.proc)
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## debug at <text>#126: stopCluster(cl)
## debug at <text>#127: registerDoSEQ()
## debug at <text>#129: if (method == "leapForward" | method == "leapBackward" | method == 
##     "leapSeq") {
##     print("All models results")
##     print(model.caret$results)
##     print("Best Model")
##     print(model.caret$bestTune)
##     model = model.caret$finalModel
##     dataPlot = model.caret$results %>% gather(key = "metric", 
##         value = "value", -nvmax) %>% dplyr::filter(metric %in% 
##         c("MAE", "RMSE", "Rsquared"))
##     metricsPlot = ggplot(data = dataPlot, aes(x = nvmax, y = value)) + 
##         geom_line(color = "lightblue4") + geom_point(color = "blue", 
##         alpha = 0.7, size = 0.9) + facet_wrap(~metric, ncol = 4, 
##         scales = "free_y") + theme_light()
##     plot(metricsPlot)
##     dataPlot = data.frame(pred = predict(model.caret, data), 
##         res = resid(model.caret))
##     residPlot = ggplot(dataPlot, aes(x = pred, y = res)) + geom_point(color = "light blue", 
##         alpha = 0.7) + geom_smooth() + theme_light()
##     plot(residPlot)
##     residHistogram = ggplot(dataPlot, aes(x = res)) + geom_histogram(aes(y = ..density..), 
##         fill = "light blue", alpha = 1) + geom_density(color = "lightblue4") + 
##         theme_light()
##     plot(residHistogram)
##     id = rownames(model.caret$bestTune)
##     print("Coefficients of final model:")
##     coefs <- coef(model, id = id)
##     nams <- names(coefs)
##     nams <- nams[!nams %in% "(Intercept)"]
##     response <- as.character(formula[[2]])
##     form <- as.formula(paste(response, paste(nams, collapse = " + "), 
##         sep = " ~ "))
##     mod <- lm(form, data = data)
##     print(car::Confint(mod))
##     return(list(model = model, id = id, residPlot = residPlot, 
##         residHistogram = residHistogram, modelLM = mod))
## }
## debug at <text>#181: if (method == "glmnet" && subopt == "LASSO") {
##     print(model.caret)
##     print(plot(model.caret))
##     print(model.caret$bestTune)
##     print(model.caret$results)
##     model = model.caret$finalModel
##     dataPlot = model.caret$results %>% gather(key = "metric", 
##         value = "value", -lambda) %>% dplyr::filter(metric %in% 
##         c("MAE", "RMSE", "Rsquared"))
##     metricsPlot = ggplot(data = dataPlot, aes(x = lambda, y = value)) + 
##         geom_line(color = "lightblue4") + geom_point(color = "blue", 
##         alpha = 0.7, size = 0.9) + facet_wrap(~metric, ncol = 4, 
##         scales = "free_y") + theme_light()
##     plot(metricsPlot)
##     dataPlot = data.frame(pred = predict(model.caret, data), 
##         res = resid(model.caret))
##     residPlot = ggplot(dataPlot, aes(x = pred, y = res)) + geom_point(color = "light blue", 
##         alpha = 0.7) + geom_smooth() + theme_light()
##     plot(residPlot)
##     residHistogram = ggplot(dataPlot, aes(x = res)) + geom_histogram(aes(y = ..density..), 
##         fill = "light blue", alpha = 1) + geom_density(color = "lightblue4") + 
##         theme_light()
##     plot(residHistogram)
##     print("Coefficients")
##     t = coef(model, s = model.caret$bestTune$lambda)
##     model.coef = t[which(t[, 1] != 0), ]
##     print(as.data.frame(model.coef))
##     id = NULL
##     return(list(model = model.caret, id = id, residPlot = residPlot, 
##         metricsPlot = metricsPlot))
## }
## debug at <text>#182: print(model.caret)
## glmnet 
## 
## 5584 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE        Rsquared    MAE       
##   0.01000000  0.03541645  0.11297848  0.02749408
##   0.01047616  0.03555384  0.11297848  0.02759347
##   0.01097499  0.03570400  0.11297848  0.02770139
##   0.01149757  0.03586807  0.11297848  0.02782082
##   0.01204504  0.03600398  0.07466676  0.02791938
##   0.01261857  0.03601844         NaN  0.02792989
##   0.01321941  0.03601844         NaN  0.02792989
##   0.01384886  0.03601844         NaN  0.02792989
##   0.01450829  0.03601844         NaN  0.02792989
##   0.01519911  0.03601844         NaN  0.02792989
##   0.01592283  0.03601844         NaN  0.02792989
##   0.01668101  0.03601844         NaN  0.02792989
##   0.01747528  0.03601844         NaN  0.02792989
##   0.01830738  0.03601844         NaN  0.02792989
##   0.01917910  0.03601844         NaN  0.02792989
##   0.02009233  0.03601844         NaN  0.02792989
##   0.02104904  0.03601844         NaN  0.02792989
##   0.02205131  0.03601844         NaN  0.02792989
##   0.02310130  0.03601844         NaN  0.02792989
##   0.02420128  0.03601844         NaN  0.02792989
##   0.02535364  0.03601844         NaN  0.02792989
##   0.02656088  0.03601844         NaN  0.02792989
##   0.02782559  0.03601844         NaN  0.02792989
##   0.02915053  0.03601844         NaN  0.02792989
##   0.03053856  0.03601844         NaN  0.02792989
##   0.03199267  0.03601844         NaN  0.02792989
##   0.03351603  0.03601844         NaN  0.02792989
##   0.03511192  0.03601844         NaN  0.02792989
##   0.03678380  0.03601844         NaN  0.02792989
##   0.03853529  0.03601844         NaN  0.02792989
##   0.04037017  0.03601844         NaN  0.02792989
##   0.04229243  0.03601844         NaN  0.02792989
##   0.04430621  0.03601844         NaN  0.02792989
##   0.04641589  0.03601844         NaN  0.02792989
##   0.04862602  0.03601844         NaN  0.02792989
##   0.05094138  0.03601844         NaN  0.02792989
##   0.05336699  0.03601844         NaN  0.02792989
##   0.05590810  0.03601844         NaN  0.02792989
##   0.05857021  0.03601844         NaN  0.02792989
##   0.06135907  0.03601844         NaN  0.02792989
##   0.06428073  0.03601844         NaN  0.02792989
##   0.06734151  0.03601844         NaN  0.02792989
##   0.07054802  0.03601844         NaN  0.02792989
##   0.07390722  0.03601844         NaN  0.02792989
##   0.07742637  0.03601844         NaN  0.02792989
##   0.08111308  0.03601844         NaN  0.02792989
##   0.08497534  0.03601844         NaN  0.02792989
##   0.08902151  0.03601844         NaN  0.02792989
##   0.09326033  0.03601844         NaN  0.02792989
##   0.09770100  0.03601844         NaN  0.02792989
##   0.10235310  0.03601844         NaN  0.02792989
##   0.10722672  0.03601844         NaN  0.02792989
##   0.11233240  0.03601844         NaN  0.02792989
##   0.11768120  0.03601844         NaN  0.02792989
##   0.12328467  0.03601844         NaN  0.02792989
##   0.12915497  0.03601844         NaN  0.02792989
##   0.13530478  0.03601844         NaN  0.02792989
##   0.14174742  0.03601844         NaN  0.02792989
##   0.14849683  0.03601844         NaN  0.02792989
##   0.15556761  0.03601844         NaN  0.02792989
##   0.16297508  0.03601844         NaN  0.02792989
##   0.17073526  0.03601844         NaN  0.02792989
##   0.17886495  0.03601844         NaN  0.02792989
##   0.18738174  0.03601844         NaN  0.02792989
##   0.19630407  0.03601844         NaN  0.02792989
##   0.20565123  0.03601844         NaN  0.02792989
##   0.21544347  0.03601844         NaN  0.02792989
##   0.22570197  0.03601844         NaN  0.02792989
##   0.23644894  0.03601844         NaN  0.02792989
##   0.24770764  0.03601844         NaN  0.02792989
##   0.25950242  0.03601844         NaN  0.02792989
##   0.27185882  0.03601844         NaN  0.02792989
##   0.28480359  0.03601844         NaN  0.02792989
##   0.29836472  0.03601844         NaN  0.02792989
##   0.31257158  0.03601844         NaN  0.02792989
##   0.32745492  0.03601844         NaN  0.02792989
##   0.34304693  0.03601844         NaN  0.02792989
##   0.35938137  0.03601844         NaN  0.02792989
##   0.37649358  0.03601844         NaN  0.02792989
##   0.39442061  0.03601844         NaN  0.02792989
##   0.41320124  0.03601844         NaN  0.02792989
##   0.43287613  0.03601844         NaN  0.02792989
##   0.45348785  0.03601844         NaN  0.02792989
##   0.47508102  0.03601844         NaN  0.02792989
##   0.49770236  0.03601844         NaN  0.02792989
##   0.52140083  0.03601844         NaN  0.02792989
##   0.54622772  0.03601844         NaN  0.02792989
##   0.57223677  0.03601844         NaN  0.02792989
##   0.59948425  0.03601844         NaN  0.02792989
##   0.62802914  0.03601844         NaN  0.02792989
##   0.65793322  0.03601844         NaN  0.02792989
##   0.68926121  0.03601844         NaN  0.02792989
##   0.72208090  0.03601844         NaN  0.02792989
##   0.75646333  0.03601844         NaN  0.02792989
##   0.79248290  0.03601844         NaN  0.02792989
##   0.83021757  0.03601844         NaN  0.02792989
##   0.86974900  0.03601844         NaN  0.02792989
##   0.91116276  0.03601844         NaN  0.02792989
##   0.95454846  0.03601844         NaN  0.02792989
##   1.00000000  0.03601844         NaN  0.02792989
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.
## debug at <text>#183: print(plot(model.caret))

## debug at <text>#184: print(model.caret$bestTune)
##   alpha lambda
## 1     1   0.01
## debug at <text>#186: print(model.caret$results)
##     alpha     lambda       RMSE   Rsquared        MAE      RMSESD
## 1       1 0.01000000 0.03541645 0.11297848 0.02749408 0.001278449
## 2       1 0.01047616 0.03555384 0.11297848 0.02759347 0.001280000
## 3       1 0.01097499 0.03570400 0.11297848 0.02770139 0.001282188
## 4       1 0.01149757 0.03586807 0.11297848 0.02782082 0.001285090
## 5       1 0.01204504 0.03600398 0.07466676 0.02791938 0.001269975
## 6       1 0.01261857 0.03601844        NaN 0.02792989 0.001269338
## 7       1 0.01321941 0.03601844        NaN 0.02792989 0.001269338
## 8       1 0.01384886 0.03601844        NaN 0.02792989 0.001269338
## 9       1 0.01450829 0.03601844        NaN 0.02792989 0.001269338
## 10      1 0.01519911 0.03601844        NaN 0.02792989 0.001269338
## 11      1 0.01592283 0.03601844        NaN 0.02792989 0.001269338
## 12      1 0.01668101 0.03601844        NaN 0.02792989 0.001269338
## 13      1 0.01747528 0.03601844        NaN 0.02792989 0.001269338
## 14      1 0.01830738 0.03601844        NaN 0.02792989 0.001269338
## 15      1 0.01917910 0.03601844        NaN 0.02792989 0.001269338
## 16      1 0.02009233 0.03601844        NaN 0.02792989 0.001269338
## 17      1 0.02104904 0.03601844        NaN 0.02792989 0.001269338
## 18      1 0.02205131 0.03601844        NaN 0.02792989 0.001269338
## 19      1 0.02310130 0.03601844        NaN 0.02792989 0.001269338
## 20      1 0.02420128 0.03601844        NaN 0.02792989 0.001269338
## 21      1 0.02535364 0.03601844        NaN 0.02792989 0.001269338
## 22      1 0.02656088 0.03601844        NaN 0.02792989 0.001269338
## 23      1 0.02782559 0.03601844        NaN 0.02792989 0.001269338
## 24      1 0.02915053 0.03601844        NaN 0.02792989 0.001269338
## 25      1 0.03053856 0.03601844        NaN 0.02792989 0.001269338
## 26      1 0.03199267 0.03601844        NaN 0.02792989 0.001269338
## 27      1 0.03351603 0.03601844        NaN 0.02792989 0.001269338
## 28      1 0.03511192 0.03601844        NaN 0.02792989 0.001269338
## 29      1 0.03678380 0.03601844        NaN 0.02792989 0.001269338
## 30      1 0.03853529 0.03601844        NaN 0.02792989 0.001269338
## 31      1 0.04037017 0.03601844        NaN 0.02792989 0.001269338
## 32      1 0.04229243 0.03601844        NaN 0.02792989 0.001269338
## 33      1 0.04430621 0.03601844        NaN 0.02792989 0.001269338
## 34      1 0.04641589 0.03601844        NaN 0.02792989 0.001269338
## 35      1 0.04862602 0.03601844        NaN 0.02792989 0.001269338
## 36      1 0.05094138 0.03601844        NaN 0.02792989 0.001269338
## 37      1 0.05336699 0.03601844        NaN 0.02792989 0.001269338
## 38      1 0.05590810 0.03601844        NaN 0.02792989 0.001269338
## 39      1 0.05857021 0.03601844        NaN 0.02792989 0.001269338
## 40      1 0.06135907 0.03601844        NaN 0.02792989 0.001269338
## 41      1 0.06428073 0.03601844        NaN 0.02792989 0.001269338
## 42      1 0.06734151 0.03601844        NaN 0.02792989 0.001269338
## 43      1 0.07054802 0.03601844        NaN 0.02792989 0.001269338
## 44      1 0.07390722 0.03601844        NaN 0.02792989 0.001269338
## 45      1 0.07742637 0.03601844        NaN 0.02792989 0.001269338
## 46      1 0.08111308 0.03601844        NaN 0.02792989 0.001269338
## 47      1 0.08497534 0.03601844        NaN 0.02792989 0.001269338
## 48      1 0.08902151 0.03601844        NaN 0.02792989 0.001269338
## 49      1 0.09326033 0.03601844        NaN 0.02792989 0.001269338
## 50      1 0.09770100 0.03601844        NaN 0.02792989 0.001269338
## 51      1 0.10235310 0.03601844        NaN 0.02792989 0.001269338
## 52      1 0.10722672 0.03601844        NaN 0.02792989 0.001269338
## 53      1 0.11233240 0.03601844        NaN 0.02792989 0.001269338
## 54      1 0.11768120 0.03601844        NaN 0.02792989 0.001269338
## 55      1 0.12328467 0.03601844        NaN 0.02792989 0.001269338
## 56      1 0.12915497 0.03601844        NaN 0.02792989 0.001269338
## 57      1 0.13530478 0.03601844        NaN 0.02792989 0.001269338
## 58      1 0.14174742 0.03601844        NaN 0.02792989 0.001269338
## 59      1 0.14849683 0.03601844        NaN 0.02792989 0.001269338
## 60      1 0.15556761 0.03601844        NaN 0.02792989 0.001269338
## 61      1 0.16297508 0.03601844        NaN 0.02792989 0.001269338
## 62      1 0.17073526 0.03601844        NaN 0.02792989 0.001269338
## 63      1 0.17886495 0.03601844        NaN 0.02792989 0.001269338
## 64      1 0.18738174 0.03601844        NaN 0.02792989 0.001269338
## 65      1 0.19630407 0.03601844        NaN 0.02792989 0.001269338
## 66      1 0.20565123 0.03601844        NaN 0.02792989 0.001269338
## 67      1 0.21544347 0.03601844        NaN 0.02792989 0.001269338
## 68      1 0.22570197 0.03601844        NaN 0.02792989 0.001269338
## 69      1 0.23644894 0.03601844        NaN 0.02792989 0.001269338
## 70      1 0.24770764 0.03601844        NaN 0.02792989 0.001269338
## 71      1 0.25950242 0.03601844        NaN 0.02792989 0.001269338
## 72      1 0.27185882 0.03601844        NaN 0.02792989 0.001269338
## 73      1 0.28480359 0.03601844        NaN 0.02792989 0.001269338
## 74      1 0.29836472 0.03601844        NaN 0.02792989 0.001269338
## 75      1 0.31257158 0.03601844        NaN 0.02792989 0.001269338
## 76      1 0.32745492 0.03601844        NaN 0.02792989 0.001269338
## 77      1 0.34304693 0.03601844        NaN 0.02792989 0.001269338
## 78      1 0.35938137 0.03601844        NaN 0.02792989 0.001269338
## 79      1 0.37649358 0.03601844        NaN 0.02792989 0.001269338
## 80      1 0.39442061 0.03601844        NaN 0.02792989 0.001269338
## 81      1 0.41320124 0.03601844        NaN 0.02792989 0.001269338
## 82      1 0.43287613 0.03601844        NaN 0.02792989 0.001269338
## 83      1 0.45348785 0.03601844        NaN 0.02792989 0.001269338
## 84      1 0.47508102 0.03601844        NaN 0.02792989 0.001269338
## 85      1 0.49770236 0.03601844        NaN 0.02792989 0.001269338
## 86      1 0.52140083 0.03601844        NaN 0.02792989 0.001269338
## 87      1 0.54622772 0.03601844        NaN 0.02792989 0.001269338
## 88      1 0.57223677 0.03601844        NaN 0.02792989 0.001269338
## 89      1 0.59948425 0.03601844        NaN 0.02792989 0.001269338
## 90      1 0.62802914 0.03601844        NaN 0.02792989 0.001269338
## 91      1 0.65793322 0.03601844        NaN 0.02792989 0.001269338
## 92      1 0.68926121 0.03601844        NaN 0.02792989 0.001269338
## 93      1 0.72208090 0.03601844        NaN 0.02792989 0.001269338
## 94      1 0.75646333 0.03601844        NaN 0.02792989 0.001269338
## 95      1 0.79248290 0.03601844        NaN 0.02792989 0.001269338
## 96      1 0.83021757 0.03601844        NaN 0.02792989 0.001269338
## 97      1 0.86974900 0.03601844        NaN 0.02792989 0.001269338
## 98      1 0.91116276 0.03601844        NaN 0.02792989 0.001269338
## 99      1 0.95454846 0.03601844        NaN 0.02792989 0.001269338
## 100     1 1.00000000 0.03601844        NaN 0.02792989 0.001269338
##     RsquaredSD        MAESD
## 1   0.03258230 0.0007507719
## 2   0.03258230 0.0007568275
## 3   0.03258230 0.0007640729
## 4   0.03258230 0.0007721991
## 5   0.01220051 0.0007684911
## 6           NA 0.0007666779
## 7           NA 0.0007666779
## 8           NA 0.0007666779
## 9           NA 0.0007666779
## 10          NA 0.0007666779
## 11          NA 0.0007666779
## 12          NA 0.0007666779
## 13          NA 0.0007666779
## 14          NA 0.0007666779
## 15          NA 0.0007666779
## 16          NA 0.0007666779
## 17          NA 0.0007666779
## 18          NA 0.0007666779
## 19          NA 0.0007666779
## 20          NA 0.0007666779
## 21          NA 0.0007666779
## 22          NA 0.0007666779
## 23          NA 0.0007666779
## 24          NA 0.0007666779
## 25          NA 0.0007666779
## 26          NA 0.0007666779
## 27          NA 0.0007666779
## 28          NA 0.0007666779
## 29          NA 0.0007666779
## 30          NA 0.0007666779
## 31          NA 0.0007666779
## 32          NA 0.0007666779
## 33          NA 0.0007666779
## 34          NA 0.0007666779
## 35          NA 0.0007666779
## 36          NA 0.0007666779
## 37          NA 0.0007666779
## 38          NA 0.0007666779
## 39          NA 0.0007666779
## 40          NA 0.0007666779
## 41          NA 0.0007666779
## 42          NA 0.0007666779
## 43          NA 0.0007666779
## 44          NA 0.0007666779
## 45          NA 0.0007666779
## 46          NA 0.0007666779
## 47          NA 0.0007666779
## 48          NA 0.0007666779
## 49          NA 0.0007666779
## 50          NA 0.0007666779
## 51          NA 0.0007666779
## 52          NA 0.0007666779
## 53          NA 0.0007666779
## 54          NA 0.0007666779
## 55          NA 0.0007666779
## 56          NA 0.0007666779
## 57          NA 0.0007666779
## 58          NA 0.0007666779
## 59          NA 0.0007666779
## 60          NA 0.0007666779
## 61          NA 0.0007666779
## 62          NA 0.0007666779
## 63          NA 0.0007666779
## 64          NA 0.0007666779
## 65          NA 0.0007666779
## 66          NA 0.0007666779
## 67          NA 0.0007666779
## 68          NA 0.0007666779
## 69          NA 0.0007666779
## 70          NA 0.0007666779
## 71          NA 0.0007666779
## 72          NA 0.0007666779
## 73          NA 0.0007666779
## 74          NA 0.0007666779
## 75          NA 0.0007666779
## 76          NA 0.0007666779
## 77          NA 0.0007666779
## 78          NA 0.0007666779
## 79          NA 0.0007666779
## 80          NA 0.0007666779
## 81          NA 0.0007666779
## 82          NA 0.0007666779
## 83          NA 0.0007666779
## 84          NA 0.0007666779
## 85          NA 0.0007666779
## 86          NA 0.0007666779
## 87          NA 0.0007666779
## 88          NA 0.0007666779
## 89          NA 0.0007666779
## 90          NA 0.0007666779
## 91          NA 0.0007666779
## 92          NA 0.0007666779
## 93          NA 0.0007666779
## 94          NA 0.0007666779
## 95          NA 0.0007666779
## 96          NA 0.0007666779
## 97          NA 0.0007666779
## 98          NA 0.0007666779
## 99          NA 0.0007666779
## 100         NA 0.0007666779
## debug at <text>#187: model = model.caret$finalModel
## debug at <text>#189: dataPlot = model.caret$results %>% gather(key = "metric", value = "value", 
##     -lambda) %>% dplyr::filter(metric %in% c("MAE", "RMSE", "Rsquared"))
## debug at <text>#192: metricsPlot = ggplot(data = dataPlot, aes(x = lambda, y = value)) + 
##     geom_line(color = "lightblue4") + geom_point(color = "blue", 
##     alpha = 0.7, size = 0.9) + facet_wrap(~metric, ncol = 4, 
##     scales = "free_y") + theme_light()
## debug at <text>#197: plot(metricsPlot)
## Warning: Removed 95 rows containing missing values (geom_path).
## Warning: Removed 95 rows containing missing values (geom_point).

## debug at <text>#200: dataPlot = data.frame(pred = predict(model.caret, data), res = resid(model.caret))
## debug at <text>#201: residPlot = ggplot(dataPlot, aes(x = pred, y = res)) + geom_point(color = "light blue", 
##     alpha = 0.7) + geom_smooth() + theme_light()
## debug at <text>#205: plot(residPlot)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## debug at <text>#207: residHistogram = ggplot(dataPlot, aes(x = res)) + geom_histogram(aes(y = ..density..), 
##     fill = "light blue", alpha = 1) + geom_density(color = "lightblue4") + 
##     theme_light()
## debug at <text>#211: plot(residHistogram)
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## debug at <text>#213: print("Coefficients")
## [1] "Coefficients"
## debug at <text>#216: t = coef(model, s = model.caret$bestTune$lambda)
## debug at <text>#217: model.coef = t[which(t[, 1] != 0), ]
## debug at <text>#218: print(as.data.frame(model.coef))
##              model.coef
## (Intercept) 2.087277329
## sqrt.x18    0.004380592
## debug at <text>#219: id = NULL
## debug at <text>#220: return(list(model = model.caret, id = id, residPlot = residPlot, 
##     metricsPlot = metricsPlot))
## exiting from: train.caret.glmselect(formula = formula, data = data.train, method = "glmnet", 
##     subopt = "LASSO", feature.names = feature.names)

Test

# Evaluate the tuned caret/glmnet LASSO model on the held-out test set.
# Prints a summary of predicted values and the test MSE (see output below).
#
# isTRUE() replaces `== TRUE`: identical for a logical scalar flag, but safe
# if the parameter is ever NA or missing.
#
# NOTE(review): `transformation = t` passes whatever `t` currently holds; if no
# earlier assignment to `t` is in scope at this point, it resolves to base::t
# (matrix transpose). Confirm the intended transformation object is passed.
if (isTRUE(algo.LASSO.caret)) {
  test.model(model.LASSO.caret, data.test,
             method = "glmnet", subopt = "LASSO",
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.093   2.095   2.097   2.097   2.098   2.100 
## [1] "glmnet LASSO Test MSE: 0.00127974270500997"

LASSO with cross-validation (using the filtered training set)

Train

# Re-train the LASSO (glmnet, alpha = 1) model via caret with 10-fold CV,
# this time on the filtered training set `data.train2`, and keep the fitted
# caret model for the subsequent test step.
#
# set.seed(1) fixes the CV fold assignment so the resampling is reproducible.
# isTRUE() replaces `== TRUE` (safe against NA); `<-` replaces `=` for
# assignment per R convention; quoting unified to double quotes.
if (isTRUE(algo.LASSO.caret)) {
  set.seed(1)
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "glmnet",
                                    subopt = "LASSO",
                                    feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 5299 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 4769, 4767, 4771, 4768, 4769, 4768, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE        Rsquared   MAE       
##   0.01000000  0.03039619  0.1530762  0.02433314
##   0.01047616  0.03055619  0.1530762  0.02444529
##   0.01097499  0.03073083  0.1530762  0.02456866
##   0.01149757  0.03092136  0.1530762  0.02470530
##   0.01204504  0.03112913  0.1530762  0.02485528
##   0.01261857  0.03118073        NaN  0.02489217
##   0.01321941  0.03118073        NaN  0.02489217
##   0.01384886  0.03118073        NaN  0.02489217
##   0.01450829  0.03118073        NaN  0.02489217
##   0.01519911  0.03118073        NaN  0.02489217
##   0.01592283  0.03118073        NaN  0.02489217
##   0.01668101  0.03118073        NaN  0.02489217
##   0.01747528  0.03118073        NaN  0.02489217
##   0.01830738  0.03118073        NaN  0.02489217
##   0.01917910  0.03118073        NaN  0.02489217
##   0.02009233  0.03118073        NaN  0.02489217
##   0.02104904  0.03118073        NaN  0.02489217
##   0.02205131  0.03118073        NaN  0.02489217
##   0.02310130  0.03118073        NaN  0.02489217
##   0.02420128  0.03118073        NaN  0.02489217
##   0.02535364  0.03118073        NaN  0.02489217
##   0.02656088  0.03118073        NaN  0.02489217
##   0.02782559  0.03118073        NaN  0.02489217
##   0.02915053  0.03118073        NaN  0.02489217
##   0.03053856  0.03118073        NaN  0.02489217
##   0.03199267  0.03118073        NaN  0.02489217
##   0.03351603  0.03118073        NaN  0.02489217
##   0.03511192  0.03118073        NaN  0.02489217
##   0.03678380  0.03118073        NaN  0.02489217
##   0.03853529  0.03118073        NaN  0.02489217
##   0.04037017  0.03118073        NaN  0.02489217
##   0.04229243  0.03118073        NaN  0.02489217
##   0.04430621  0.03118073        NaN  0.02489217
##   0.04641589  0.03118073        NaN  0.02489217
##   0.04862602  0.03118073        NaN  0.02489217
##   0.05094138  0.03118073        NaN  0.02489217
##   0.05336699  0.03118073        NaN  0.02489217
##   0.05590810  0.03118073        NaN  0.02489217
##   0.05857021  0.03118073        NaN  0.02489217
##   0.06135907  0.03118073        NaN  0.02489217
##   0.06428073  0.03118073        NaN  0.02489217
##   0.06734151  0.03118073        NaN  0.02489217
##   0.07054802  0.03118073        NaN  0.02489217
##   0.07390722  0.03118073        NaN  0.02489217
##   0.07742637  0.03118073        NaN  0.02489217
##   0.08111308  0.03118073        NaN  0.02489217
##   0.08497534  0.03118073        NaN  0.02489217
##   0.08902151  0.03118073        NaN  0.02489217
##   0.09326033  0.03118073        NaN  0.02489217
##   0.09770100  0.03118073        NaN  0.02489217
##   0.10235310  0.03118073        NaN  0.02489217
##   0.10722672  0.03118073        NaN  0.02489217
##   0.11233240  0.03118073        NaN  0.02489217
##   0.11768120  0.03118073        NaN  0.02489217
##   0.12328467  0.03118073        NaN  0.02489217
##   0.12915497  0.03118073        NaN  0.02489217
##   0.13530478  0.03118073        NaN  0.02489217
##   0.14174742  0.03118073        NaN  0.02489217
##   0.14849683  0.03118073        NaN  0.02489217
##   0.15556761  0.03118073        NaN  0.02489217
##   0.16297508  0.03118073        NaN  0.02489217
##   0.17073526  0.03118073        NaN  0.02489217
##   0.17886495  0.03118073        NaN  0.02489217
##   0.18738174  0.03118073        NaN  0.02489217
##   0.19630407  0.03118073        NaN  0.02489217
##   0.20565123  0.03118073        NaN  0.02489217
##   0.21544347  0.03118073        NaN  0.02489217
##   0.22570197  0.03118073        NaN  0.02489217
##   0.23644894  0.03118073        NaN  0.02489217
##   0.24770764  0.03118073        NaN  0.02489217
##   0.25950242  0.03118073        NaN  0.02489217
##   0.27185882  0.03118073        NaN  0.02489217
##   0.28480359  0.03118073        NaN  0.02489217
##   0.29836472  0.03118073        NaN  0.02489217
##   0.31257158  0.03118073        NaN  0.02489217
##   0.32745492  0.03118073        NaN  0.02489217
##   0.34304693  0.03118073        NaN  0.02489217
##   0.35938137  0.03118073        NaN  0.02489217
##   0.37649358  0.03118073        NaN  0.02489217
##   0.39442061  0.03118073        NaN  0.02489217
##   0.41320124  0.03118073        NaN  0.02489217
##   0.43287613  0.03118073        NaN  0.02489217
##   0.45348785  0.03118073        NaN  0.02489217
##   0.47508102  0.03118073        NaN  0.02489217
##   0.49770236  0.03118073        NaN  0.02489217
##   0.52140083  0.03118073        NaN  0.02489217
##   0.54622772  0.03118073        NaN  0.02489217
##   0.57223677  0.03118073        NaN  0.02489217
##   0.59948425  0.03118073        NaN  0.02489217
##   0.62802914  0.03118073        NaN  0.02489217
##   0.65793322  0.03118073        NaN  0.02489217
##   0.68926121  0.03118073        NaN  0.02489217
##   0.72208090  0.03118073        NaN  0.02489217
##   0.75646333  0.03118073        NaN  0.02489217
##   0.79248290  0.03118073        NaN  0.02489217
##   0.83021757  0.03118073        NaN  0.02489217
##   0.86974900  0.03118073        NaN  0.02489217
##   0.91116276  0.03118073        NaN  0.02489217
##   0.95454846  0.03118073        NaN  0.02489217
##   1.00000000  0.03118073        NaN  0.02489217
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
##     alpha     lambda       RMSE  Rsquared        MAE       RMSESD
## 1       1 0.01000000 0.03039619 0.1530762 0.02433314 0.0006747181
## 2       1 0.01047616 0.03055619 0.1530762 0.02444529 0.0006740325
## 3       1 0.01097499 0.03073083 0.1530762 0.02456866 0.0006733880
## 4       1 0.01149757 0.03092136 0.1530762 0.02470530 0.0006728034
## 5       1 0.01204504 0.03112913 0.1530762 0.02485528 0.0006723020
## 6       1 0.01261857 0.03118073       NaN 0.02489217 0.0006640711
## 7       1 0.01321941 0.03118073       NaN 0.02489217 0.0006640711
## 8       1 0.01384886 0.03118073       NaN 0.02489217 0.0006640711
## 9       1 0.01450829 0.03118073       NaN 0.02489217 0.0006640711
## 10      1 0.01519911 0.03118073       NaN 0.02489217 0.0006640711
## 11      1 0.01592283 0.03118073       NaN 0.02489217 0.0006640711
## 12      1 0.01668101 0.03118073       NaN 0.02489217 0.0006640711
## 13      1 0.01747528 0.03118073       NaN 0.02489217 0.0006640711
## 14      1 0.01830738 0.03118073       NaN 0.02489217 0.0006640711
## 15      1 0.01917910 0.03118073       NaN 0.02489217 0.0006640711
## 16      1 0.02009233 0.03118073       NaN 0.02489217 0.0006640711
## 17      1 0.02104904 0.03118073       NaN 0.02489217 0.0006640711
## 18      1 0.02205131 0.03118073       NaN 0.02489217 0.0006640711
## 19      1 0.02310130 0.03118073       NaN 0.02489217 0.0006640711
## 20      1 0.02420128 0.03118073       NaN 0.02489217 0.0006640711
## 21      1 0.02535364 0.03118073       NaN 0.02489217 0.0006640711
## 22      1 0.02656088 0.03118073       NaN 0.02489217 0.0006640711
## 23      1 0.02782559 0.03118073       NaN 0.02489217 0.0006640711
## 24      1 0.02915053 0.03118073       NaN 0.02489217 0.0006640711
## 25      1 0.03053856 0.03118073       NaN 0.02489217 0.0006640711
## 26      1 0.03199267 0.03118073       NaN 0.02489217 0.0006640711
## 27      1 0.03351603 0.03118073       NaN 0.02489217 0.0006640711
## 28      1 0.03511192 0.03118073       NaN 0.02489217 0.0006640711
## 29      1 0.03678380 0.03118073       NaN 0.02489217 0.0006640711
## 30      1 0.03853529 0.03118073       NaN 0.02489217 0.0006640711
## 31      1 0.04037017 0.03118073       NaN 0.02489217 0.0006640711
## 32      1 0.04229243 0.03118073       NaN 0.02489217 0.0006640711
## 33      1 0.04430621 0.03118073       NaN 0.02489217 0.0006640711
## 34      1 0.04641589 0.03118073       NaN 0.02489217 0.0006640711
## 35      1 0.04862602 0.03118073       NaN 0.02489217 0.0006640711
## 36      1 0.05094138 0.03118073       NaN 0.02489217 0.0006640711
## 37      1 0.05336699 0.03118073       NaN 0.02489217 0.0006640711
## 38      1 0.05590810 0.03118073       NaN 0.02489217 0.0006640711
## 39      1 0.05857021 0.03118073       NaN 0.02489217 0.0006640711
## 40      1 0.06135907 0.03118073       NaN 0.02489217 0.0006640711
## 41      1 0.06428073 0.03118073       NaN 0.02489217 0.0006640711
## 42      1 0.06734151 0.03118073       NaN 0.02489217 0.0006640711
## 43      1 0.07054802 0.03118073       NaN 0.02489217 0.0006640711
## 44      1 0.07390722 0.03118073       NaN 0.02489217 0.0006640711
## 45      1 0.07742637 0.03118073       NaN 0.02489217 0.0006640711
## 46      1 0.08111308 0.03118073       NaN 0.02489217 0.0006640711
## 47      1 0.08497534 0.03118073       NaN 0.02489217 0.0006640711
## 48      1 0.08902151 0.03118073       NaN 0.02489217 0.0006640711
## 49      1 0.09326033 0.03118073       NaN 0.02489217 0.0006640711
## 50      1 0.09770100 0.03118073       NaN 0.02489217 0.0006640711
## 51      1 0.10235310 0.03118073       NaN 0.02489217 0.0006640711
## 52      1 0.10722672 0.03118073       NaN 0.02489217 0.0006640711
## 53      1 0.11233240 0.03118073       NaN 0.02489217 0.0006640711
## 54      1 0.11768120 0.03118073       NaN 0.02489217 0.0006640711
## 55      1 0.12328467 0.03118073       NaN 0.02489217 0.0006640711
## 56      1 0.12915497 0.03118073       NaN 0.02489217 0.0006640711
## 57      1 0.13530478 0.03118073       NaN 0.02489217 0.0006640711
## 58      1 0.14174742 0.03118073       NaN 0.02489217 0.0006640711
## 59      1 0.14849683 0.03118073       NaN 0.02489217 0.0006640711
## 60      1 0.15556761 0.03118073       NaN 0.02489217 0.0006640711
## 61      1 0.16297508 0.03118073       NaN 0.02489217 0.0006640711
## 62      1 0.17073526 0.03118073       NaN 0.02489217 0.0006640711
## 63      1 0.17886495 0.03118073       NaN 0.02489217 0.0006640711
## 64      1 0.18738174 0.03118073       NaN 0.02489217 0.0006640711
## 65      1 0.19630407 0.03118073       NaN 0.02489217 0.0006640711
## 66      1 0.20565123 0.03118073       NaN 0.02489217 0.0006640711
## 67      1 0.21544347 0.03118073       NaN 0.02489217 0.0006640711
## 68      1 0.22570197 0.03118073       NaN 0.02489217 0.0006640711
## 69      1 0.23644894 0.03118073       NaN 0.02489217 0.0006640711
## 70      1 0.24770764 0.03118073       NaN 0.02489217 0.0006640711
## 71      1 0.25950242 0.03118073       NaN 0.02489217 0.0006640711
## 72      1 0.27185882 0.03118073       NaN 0.02489217 0.0006640711
## 73      1 0.28480359 0.03118073       NaN 0.02489217 0.0006640711
## 74      1 0.29836472 0.03118073       NaN 0.02489217 0.0006640711
## 75      1 0.31257158 0.03118073       NaN 0.02489217 0.0006640711
## 76      1 0.32745492 0.03118073       NaN 0.02489217 0.0006640711
## 77      1 0.34304693 0.03118073       NaN 0.02489217 0.0006640711
## 78      1 0.35938137 0.03118073       NaN 0.02489217 0.0006640711
## 79      1 0.37649358 0.03118073       NaN 0.02489217 0.0006640711
## 80      1 0.39442061 0.03118073       NaN 0.02489217 0.0006640711
## 81      1 0.41320124 0.03118073       NaN 0.02489217 0.0006640711
## 82      1 0.43287613 0.03118073       NaN 0.02489217 0.0006640711
## 83      1 0.45348785 0.03118073       NaN 0.02489217 0.0006640711
## 84      1 0.47508102 0.03118073       NaN 0.02489217 0.0006640711
## 85      1 0.49770236 0.03118073       NaN 0.02489217 0.0006640711
## 86      1 0.52140083 0.03118073       NaN 0.02489217 0.0006640711
## 87      1 0.54622772 0.03118073       NaN 0.02489217 0.0006640711
## 88      1 0.57223677 0.03118073       NaN 0.02489217 0.0006640711
## 89      1 0.59948425 0.03118073       NaN 0.02489217 0.0006640711
## 90      1 0.62802914 0.03118073       NaN 0.02489217 0.0006640711
## 91      1 0.65793322 0.03118073       NaN 0.02489217 0.0006640711
## 92      1 0.68926121 0.03118073       NaN 0.02489217 0.0006640711
## 93      1 0.72208090 0.03118073       NaN 0.02489217 0.0006640711
## 94      1 0.75646333 0.03118073       NaN 0.02489217 0.0006640711
## 95      1 0.79248290 0.03118073       NaN 0.02489217 0.0006640711
## 96      1 0.83021757 0.03118073       NaN 0.02489217 0.0006640711
## 97      1 0.86974900 0.03118073       NaN 0.02489217 0.0006640711
## 98      1 0.91116276 0.03118073       NaN 0.02489217 0.0006640711
## 99      1 0.95454846 0.03118073       NaN 0.02489217 0.0006640711
## 100     1 1.00000000 0.03118073       NaN 0.02489217 0.0006640711
##     RsquaredSD        MAESD
## 1   0.01549603 0.0004918666
## 2   0.01549603 0.0004943108
## 3   0.01549603 0.0004965830
## 4   0.01549603 0.0004985902
## 5   0.01549603 0.0005011764
## 6           NA 0.0004955263
## 7           NA 0.0004955263
## 8           NA 0.0004955263
## 9           NA 0.0004955263
## 10          NA 0.0004955263
## 11          NA 0.0004955263
## 12          NA 0.0004955263
## 13          NA 0.0004955263
## 14          NA 0.0004955263
## 15          NA 0.0004955263
## 16          NA 0.0004955263
## 17          NA 0.0004955263
## 18          NA 0.0004955263
## 19          NA 0.0004955263
## 20          NA 0.0004955263
## 21          NA 0.0004955263
## 22          NA 0.0004955263
## 23          NA 0.0004955263
## 24          NA 0.0004955263
## 25          NA 0.0004955263
## 26          NA 0.0004955263
## 27          NA 0.0004955263
## 28          NA 0.0004955263
## 29          NA 0.0004955263
## 30          NA 0.0004955263
## 31          NA 0.0004955263
## 32          NA 0.0004955263
## 33          NA 0.0004955263
## 34          NA 0.0004955263
## 35          NA 0.0004955263
## 36          NA 0.0004955263
## 37          NA 0.0004955263
## 38          NA 0.0004955263
## 39          NA 0.0004955263
## 40          NA 0.0004955263
## 41          NA 0.0004955263
## 42          NA 0.0004955263
## 43          NA 0.0004955263
## 44          NA 0.0004955263
## 45          NA 0.0004955263
## 46          NA 0.0004955263
## 47          NA 0.0004955263
## 48          NA 0.0004955263
## 49          NA 0.0004955263
## 50          NA 0.0004955263
## 51          NA 0.0004955263
## 52          NA 0.0004955263
## 53          NA 0.0004955263
## 54          NA 0.0004955263
## 55          NA 0.0004955263
## 56          NA 0.0004955263
## 57          NA 0.0004955263
## 58          NA 0.0004955263
## 59          NA 0.0004955263
## 60          NA 0.0004955263
## 61          NA 0.0004955263
## 62          NA 0.0004955263
## 63          NA 0.0004955263
## 64          NA 0.0004955263
## 65          NA 0.0004955263
## 66          NA 0.0004955263
## 67          NA 0.0004955263
## 68          NA 0.0004955263
## 69          NA 0.0004955263
## 70          NA 0.0004955263
## 71          NA 0.0004955263
## 72          NA 0.0004955263
## 73          NA 0.0004955263
## 74          NA 0.0004955263
## 75          NA 0.0004955263
## 76          NA 0.0004955263
## 77          NA 0.0004955263
## 78          NA 0.0004955263
## 79          NA 0.0004955263
## 80          NA 0.0004955263
## 81          NA 0.0004955263
## 82          NA 0.0004955263
## 83          NA 0.0004955263
## 84          NA 0.0004955263
## 85          NA 0.0004955263
## 86          NA 0.0004955263
## 87          NA 0.0004955263
## 88          NA 0.0004955263
## 89          NA 0.0004955263
## 90          NA 0.0004955263
## 91          NA 0.0004955263
## 92          NA 0.0004955263
## 93          NA 0.0004955263
## 94          NA 0.0004955263
## 95          NA 0.0004955263
## 96          NA 0.0004955263
## 97          NA 0.0004955263
## 98          NA 0.0004955263
## 99          NA 0.0004955263
## 100         NA 0.0004955263
## Warning: Removed 95 rows containing missing values (geom_path).
## Warning: Removed 95 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients"
##              model.coef
## (Intercept) 2.082756530
## sqrt.x18    0.004792723

Test

# Evaluate the caret-trained glmnet/LASSO model on the held-out test set.
# isTRUE() is safer than `== TRUE`: it is FALSE (not an error) when the flag
# is NA or not a length-1 logical.
if (isTRUE(algo.LASSO.caret)) {
  test.model(model.LASSO.caret, data.test
             ,method = 'glmnet', subopt = "LASSO"
             ,formula = formula, feature.names = feature.names, label.names = label.names
             # draw.limits: request prediction-limit overlays in the plots
             ,draw.limits = TRUE
             # NOTE(review): `t` is presumably a transformation spec defined
             # earlier in the file; if not shadowed it is base R's transpose
             # function — confirm upstream.
             ,transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.089   2.091   2.093   2.093   2.095   2.096 
## [1] "glmnet LASSO Test MSE: 0.0012948435613394"

LARS with CV (w/ full train)

Train

# Train a LARS model with cross-validation via caret on the FULL training set.
# isTRUE() is safer than `== TRUE`: it is FALSE (not an error) when the flag
# is NA or not a length-1 logical.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train
                                   ,method = "lars"
                                   # NOTE(review): this passes the *string*
                                   # 'NULL', while the test chunks pass the
                                   # real NULL — confirm train.caret.glmselect
                                   # treats the string as "no sub-option".
                                   ,subopt = 'NULL'
                                   ,feature.names = feature.names)
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.414 on full training set
## Least Angle Regression 
## 
## 5584 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE        Rsquared   MAE       
##   0.00000000  0.03601844        NaN  0.02792989
##   0.01010101  0.03559141  0.1129785  0.02762023
##   0.02020202  0.03521105  0.1129785  0.02734507
##   0.03030303  0.03487936  0.1131411  0.02711018
##   0.04040404  0.03459701  0.1252940  0.02690933
##   0.05050505  0.03434142  0.1386716  0.02672603
##   0.06060606  0.03409975  0.1519731  0.02654693
##   0.07070707  0.03387412  0.1641769  0.02637304
##   0.08080808  0.03365708  0.1752655  0.02620158
##   0.09090909  0.03345238  0.1839509  0.02603817
##   0.10101010  0.03326025  0.1907077  0.02588284
##   0.11111111  0.03308487  0.1962940  0.02573716
##   0.12121212  0.03291803  0.2022330  0.02559732
##   0.13131313  0.03275972  0.2073708  0.02546399
##   0.14141414  0.03261180  0.2115489  0.02533601
##   0.15151515  0.03247440  0.2149323  0.02521481
##   0.16161616  0.03234766  0.2176605  0.02509789
##   0.17171717  0.03223207  0.2198507  0.02498624
##   0.18181818  0.03213016  0.2217579  0.02488543
##   0.19191919  0.03203663  0.2237424  0.02479023
##   0.20202020  0.03195191  0.2258040  0.02470204
##   0.21212121  0.03187450  0.2277740  0.02462279
##   0.22222222  0.03180420  0.2296281  0.02454987
##   0.23232323  0.03174204  0.2312219  0.02448408
##   0.24242424  0.03169019  0.2324901  0.02442817
##   0.25252525  0.03164676  0.2335802  0.02438160
##   0.26262626  0.03161229  0.2344408  0.02434321
##   0.27272727  0.03158858  0.2349281  0.02431676
##   0.28282828  0.03157381  0.2351107  0.02429911
##   0.29292929  0.03156193  0.2352779  0.02428457
##   0.30303030  0.03155415  0.2353216  0.02427364
##   0.31313131  0.03154695  0.2354100  0.02426353
##   0.32323232  0.03154051  0.2355018  0.02425447
##   0.33333333  0.03153513  0.2355604  0.02424680
##   0.34343434  0.03152995  0.2356366  0.02423991
##   0.35353535  0.03152565  0.2356897  0.02423427
##   0.36363636  0.03152125  0.2357681  0.02422859
##   0.37373737  0.03151796  0.2358041  0.02422351
##   0.38383838  0.03151546  0.2358106  0.02421876
##   0.39393939  0.03151266  0.2358424  0.02421377
##   0.40404040  0.03151121  0.2358201  0.02420987
##   0.41414141  0.03151076  0.2357585  0.02420720
##   0.42424242  0.03151110  0.2356642  0.02420553
##   0.43434343  0.03151245  0.2355260  0.02420482
##   0.44444444  0.03151405  0.2353863  0.02420424
##   0.45454545  0.03151665  0.2352040  0.02420463
##   0.46464646  0.03152025  0.2349800  0.02420511
##   0.47474747  0.03152456  0.2347272  0.02420639
##   0.48484848  0.03152924  0.2344627  0.02420816
##   0.49494949  0.03153450  0.2341764  0.02421034
##   0.50505051  0.03154010  0.2338788  0.02421283
##   0.51515152  0.03154643  0.2335520  0.02421597
##   0.52525253  0.03155324  0.2332074  0.02421958
##   0.53535354  0.03156062  0.2328406  0.02422380
##   0.54545455  0.03156848  0.2324557  0.02422840
##   0.55555556  0.03157671  0.2320571  0.02423335
##   0.56565657  0.03158515  0.2316534  0.02423845
##   0.57575758  0.03159357  0.2312576  0.02424343
##   0.58585859  0.03160244  0.2308466  0.02424852
##   0.59595960  0.03161185  0.2304142  0.02425411
##   0.60606061  0.03162149  0.2299760  0.02426000
##   0.61616162  0.03163145  0.2295292  0.02426628
##   0.62626263  0.03164165  0.2290766  0.02427277
##   0.63636364  0.03165222  0.2286119  0.02427951
##   0.64646465  0.03166310  0.2281367  0.02428638
##   0.65656566  0.03167435  0.2276485  0.02429353
##   0.66666667  0.03168554  0.2271683  0.02430053
##   0.67676768  0.03169708  0.2266756  0.02430828
##   0.68686869  0.03170872  0.2261836  0.02431619
##   0.69696970  0.03172043  0.2256929  0.02432391
##   0.70707071  0.03173229  0.2252010  0.02433182
##   0.71717172  0.03174404  0.2247185  0.02433957
##   0.72727273  0.03175556  0.2242519  0.02434709
##   0.73737374  0.03176719  0.2237852  0.02435488
##   0.74747475  0.03177898  0.2233154  0.02436305
##   0.75757576  0.03179078  0.2228496  0.02437149
##   0.76767677  0.03180280  0.2223792  0.02438022
##   0.77777778  0.03181503  0.2219036  0.02438915
##   0.78787879  0.03182721  0.2214349  0.02439827
##   0.79797980  0.03183954  0.2209639  0.02440759
##   0.80808081  0.03185195  0.2204945  0.02441698
##   0.81818182  0.03186464  0.2200180  0.02442646
##   0.82828283  0.03187749  0.2195380  0.02443612
##   0.83838384  0.03189039  0.2190605  0.02444574
##   0.84848485  0.03190337  0.2185835  0.02445531
##   0.85858586  0.03191653  0.2181041  0.02446506
##   0.86868687  0.03192985  0.2176224  0.02447486
##   0.87878788  0.03194347  0.2171325  0.02448487
##   0.88888889  0.03195716  0.2166439  0.02449493
##   0.89898990  0.03197082  0.2161616  0.02450507
##   0.90909091  0.03198473  0.2156720  0.02451529
##   0.91919192  0.03199899  0.2151725  0.02452581
##   0.92929293  0.03201367  0.2146585  0.02453691
##   0.93939394  0.03202832  0.2141491  0.02454837
##   0.94949495  0.03204310  0.2136397  0.02455997
##   0.95959596  0.03205813  0.2131234  0.02457185
##   0.96969697  0.03207314  0.2126123  0.02458370
##   0.97979798  0.03208796  0.2121137  0.02459546
##   0.98989899  0.03210274  0.2116205  0.02460719
##   1.00000000  0.03211761  0.2111276  0.02461892
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.4141414.

##     fraction
## 42 0.4141414
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients"
##            x4            x5            x7            x8            x9 
## -2.010969e-03  1.114589e-04  7.054797e-03  7.766280e-04  3.867976e-03 
##           x10           x11           x12           x13           x14 
##  9.351068e-04  3.489430e-04 -1.198002e-04  1.475851e-06 -2.591298e-05 
##           x16           x17           x21         stat4         stat5 
##  1.279411e-03  1.592032e-03  6.954708e-04 -3.795795e-04 -9.566733e-05 
##         stat8        stat11        stat13        stat14        stat20 
##  2.133947e-04 -4.819574e-04 -3.576103e-04 -8.106834e-04 -2.631396e-04 
##        stat22        stat23        stat24        stat25        stat29 
## -1.267533e-04  4.768769e-04 -1.713158e-04 -2.297589e-04  2.895045e-04 
##        stat30        stat41        stat46        stat48        stat54 
##  2.155001e-04 -3.569309e-04  7.203385e-05  3.678395e-04 -4.805343e-05 
##        stat55        stat59        stat60        stat65        stat72 
##  1.119082e-04  1.813295e-04  6.640897e-05 -4.356266e-05  1.431508e-05 
##        stat73        stat79        stat82        stat89        stat91 
##  2.076104e-04 -9.667281e-05  2.274290e-04 -2.779264e-04 -3.862348e-04 
##        stat92        stat96        stat98       stat100       stat103 
## -4.775609e-04 -2.042922e-04  5.878029e-03  3.249619e-04 -1.777827e-04 
##       stat104       stat110       stat111       stat113       stat116 
## -1.163105e-04 -5.433635e-03 -4.263436e-05 -8.842203e-05  1.461499e-04 
##       stat128       stat144       stat146       stat147       stat148 
## -2.622275e-05  3.487755e-04 -4.111258e-04 -1.899564e-04 -4.507365e-04 
##       stat149       stat156       stat164       stat170       stat175 
## -2.731685e-04  2.185101e-04  1.882155e-04 -4.126430e-04 -1.308671e-04 
##       stat195       stat197       stat198       stat199       stat204 
##  1.280429e-05  3.905911e-05 -8.214297e-05  1.059664e-04 -3.491560e-04 
##       stat207       stat214       stat217      sqrt.x18 
##  1.088082e-04 -3.971960e-04  1.109704e-04  1.145281e-02

Test

# Evaluate the caret-trained LARS model (full training set) on the test set.
# isTRUE() is safer than `== TRUE`: it is FALSE (not an error) when the flag
# is NA or not a length-1 logical.
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             # NOTE(review): `t` is presumably a transformation spec defined
             # earlier in the file — confirm upstream.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.046   2.087   2.098   2.098   2.110   2.142 
## [1] "lars  Test MSE: 0.0010420770080344"

LARS with CV (w/ filtered train)

Train

# Train a LARS model with cross-validation via caret on the FILTERED training
# set (data.train2); overwrites model.LARS.caret from the full-train run above.
# isTRUE() is safer than `== TRUE`: it is FALSE (not an error) when the flag
# is NA or not a length-1 logical.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train2
                                   ,method = "lars"
                                   # NOTE(review): this passes the *string*
                                   # 'NULL', while the test chunks pass the
                                   # real NULL — confirm train.caret.glmselect
                                   # treats the string as "no sub-option".
                                   ,subopt = 'NULL'
                                   ,feature.names = feature.names)
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.556 on full training set
## Least Angle Regression 
## 
## 5299 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 4769, 4767, 4771, 4768, 4769, 4768, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE        Rsquared   MAE       
##   0.00000000  0.03118073        NaN  0.02489217
##   0.01010101  0.03068250  0.1530762  0.02453391
##   0.02020202  0.03023538  0.1530762  0.02422002
##   0.03030303  0.02984201  0.1534408  0.02394543
##   0.04040404  0.02949217  0.1775931  0.02370694
##   0.05050505  0.02916766  0.1936677  0.02349165
##   0.06060606  0.02887466  0.2049108  0.02329659
##   0.07070707  0.02860468  0.2210010  0.02310567
##   0.08080808  0.02834362  0.2368795  0.02291516
##   0.09090909  0.02809608  0.2495241  0.02273123
##   0.10101010  0.02786362  0.2594578  0.02255511
##   0.11111111  0.02765089  0.2675088  0.02239226
##   0.12121212  0.02745003  0.2754206  0.02223748
##   0.13131313  0.02725602  0.2827864  0.02209006
##   0.14141414  0.02707343  0.2888855  0.02195039
##   0.15151515  0.02690301  0.2938539  0.02181766
##   0.16161616  0.02674499  0.2978842  0.02169326
##   0.17171717  0.02660197  0.3012243  0.02157884
##   0.18181818  0.02647074  0.3047679  0.02147164
##   0.19191919  0.02634820  0.3081472  0.02136879
##   0.20202020  0.02623942  0.3110266  0.02127455
##   0.21212121  0.02614022  0.3140459  0.02118985
##   0.22222222  0.02604959  0.3170314  0.02111498
##   0.23232323  0.02596721  0.3198130  0.02104866
##   0.24242424  0.02589013  0.3224143  0.02098712
##   0.25252525  0.02581774  0.3249267  0.02092892
##   0.26262626  0.02575301  0.3271127  0.02087594
##   0.27272727  0.02569755  0.3289457  0.02082949
##   0.28282828  0.02564924  0.3305429  0.02078884
##   0.29292929  0.02560919  0.3318134  0.02075319
##   0.30303030  0.02557555  0.3328877  0.02072219
##   0.31313131  0.02554961  0.3336937  0.02069912
##   0.32323232  0.02552908  0.3342987  0.02068162
##   0.33333333  0.02551038  0.3348722  0.02066562
##   0.34343434  0.02549323  0.3354227  0.02065120
##   0.35353535  0.02547582  0.3360431  0.02063736
##   0.36363636  0.02545880  0.3366964  0.02062395
##   0.37373737  0.02544352  0.3372806  0.02061196
##   0.38383838  0.02542879  0.3378587  0.02060003
##   0.39393939  0.02541421  0.3384457  0.02058790
##   0.40404040  0.02540122  0.3389617  0.02057658
##   0.41414141  0.02539021  0.3393813  0.02056643
##   0.42424242  0.02538033  0.3397498  0.02055672
##   0.43434343  0.02537140  0.3400798  0.02054719
##   0.44444444  0.02536270  0.3404111  0.02053723
##   0.45454545  0.02535469  0.3407162  0.02052766
##   0.46464646  0.02534807  0.3409562  0.02051895
##   0.47474747  0.02534180  0.3411853  0.02051035
##   0.48484848  0.02533666  0.3413613  0.02050271
##   0.49494949  0.02533251  0.3414896  0.02049603
##   0.50505051  0.02532903  0.3415878  0.02049028
##   0.51515152  0.02532609  0.3416649  0.02048519
##   0.52525253  0.02532360  0.3417240  0.02048084
##   0.53535354  0.02532182  0.3417519  0.02047703
##   0.54545455  0.02532088  0.3417417  0.02047422
##   0.55555556  0.02532076  0.3416931  0.02047233
##   0.56565657  0.02532079  0.3416420  0.02047052
##   0.57575758  0.02532127  0.3415727  0.02046960
##   0.58585859  0.02532195  0.3414986  0.02046898
##   0.59595960  0.02532297  0.3414127  0.02046890
##   0.60606061  0.02532432  0.3413140  0.02046956
##   0.61616162  0.02532594  0.3412058  0.02047064
##   0.62626263  0.02532780  0.3410895  0.02047181
##   0.63636364  0.02532991  0.3409647  0.02047295
##   0.64646465  0.02533230  0.3408296  0.02047411
##   0.65656566  0.02533511  0.3406764  0.02047545
##   0.66666667  0.02533810  0.3405193  0.02047662
##   0.67676768  0.02534130  0.3403571  0.02047793
##   0.68686869  0.02534483  0.3401826  0.02047940
##   0.69696970  0.02534906  0.3399769  0.02048138
##   0.70707071  0.02535377  0.3397498  0.02048397
##   0.71717172  0.02535864  0.3395174  0.02048701
##   0.72727273  0.02536369  0.3392804  0.02049031
##   0.73737374  0.02536904  0.3390328  0.02049389
##   0.74747475  0.02537465  0.3387767  0.02049768
##   0.75757576  0.02538067  0.3385046  0.02050180
##   0.76767677  0.02538722  0.3382092  0.02050605
##   0.77777778  0.02539413  0.3379011  0.02051055
##   0.78787879  0.02540146  0.3375758  0.02051523
##   0.79797980  0.02540917  0.3372354  0.02052029
##   0.80808081  0.02541699  0.3368944  0.02052567
##   0.81818182  0.02542518  0.3365396  0.02053140
##   0.82828283  0.02543381  0.3361677  0.02053739
##   0.83838384  0.02544257  0.3357947  0.02054335
##   0.84848485  0.02545157  0.3354147  0.02054934
##   0.85858586  0.02546077  0.3350286  0.02055553
##   0.86868687  0.02547045  0.3346228  0.02056218
##   0.87878788  0.02548027  0.3342137  0.02056891
##   0.88888889  0.02549019  0.3338038  0.02057563
##   0.89898990  0.02550040  0.3333841  0.02058275
##   0.90909091  0.02551100  0.3329496  0.02059034
##   0.91919192  0.02552197  0.3325018  0.02059827
##   0.92929293  0.02553321  0.3320453  0.02060655
##   0.93939394  0.02554457  0.3315875  0.02061492
##   0.94949495  0.02555614  0.3311229  0.02062354
##   0.95959596  0.02556784  0.3306575  0.02063234
##   0.96969697  0.02557970  0.3301888  0.02064137
##   0.97979798  0.02559163  0.3297215  0.02065059
##   0.98989899  0.02560349  0.3292616  0.02066000
##   1.00000000  0.02561559  0.3287947  0.02066959
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.5555556.

##     fraction
## 56 0.5555556
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

## [1] "Coefficients"
##            x4            x5            x7            x8            x9 
## -2.517820e-03  3.998768e-04  7.765010e-03  1.352302e-03  4.023682e-03 
##           x10           x11           x13           x14           x16 
##  1.572801e-03  8.721457e-04  3.109416e-04 -1.004441e-04  1.633226e-03 
##           x17           x19           x20           x21           x22 
##  1.869804e-03  2.289717e-05 -7.759872e-05  1.080426e-03 -3.024454e-04 
##         stat1         stat3         stat4         stat5         stat6 
## -4.712613e-04  2.193897e-04 -8.791253e-04 -3.782156e-04 -2.570810e-04 
##        stat11        stat13        stat14        stat15        stat18 
## -7.640450e-04 -3.406497e-04 -1.411739e-03 -5.109478e-04 -5.114726e-06 
##        stat19        stat23        stat24        stat25        stat26 
##  1.509714e-05  4.589814e-04 -3.189369e-04 -1.467846e-04 -1.056371e-04 
##        stat29        stat30        stat34        stat35        stat38 
##  4.864789e-04  2.206276e-04  3.564885e-04 -6.765722e-05  1.362208e-04 
##        stat39        stat41        stat44        stat45        stat46 
## -1.298662e-04 -4.384835e-04  2.448255e-04 -1.693081e-04  1.437306e-05 
##        stat48        stat50        stat51        stat52        stat53 
##  4.864743e-04  3.408434e-04  2.284408e-05  2.887550e-06 -1.684728e-04 
##        stat54        stat55        stat59        stat60        stat62 
## -4.779687e-04  1.299773e-04  4.305526e-04  3.971918e-04 -4.327827e-04 
##        stat63        stat67        stat72        stat73        stat78 
##  4.873320e-05  4.446392e-05  6.278614e-05  4.154863e-04 -1.451823e-04 
##        stat79        stat82        stat84        stat89        stat90 
## -8.629070e-05  3.362317e-05 -3.598343e-05 -2.049133e-05 -1.303189e-04 
##        stat91        stat92        stat93        stat95        stat96 
## -4.193606e-04 -5.335259e-04 -1.952390e-04  1.090738e-04 -3.021265e-04 
##        stat97        stat98        stat99       stat100       stat103 
##  8.783427e-05  5.636822e-03  2.976655e-04  6.048295e-04 -2.954603e-04 
##       stat104       stat106       stat109       stat110       stat111 
## -5.677323e-05 -1.327182e-04 -1.410934e-04 -5.585354e-03 -4.416201e-05 
##       stat113       stat114       stat115       stat116       stat128 
## -7.044101e-05  4.953235e-04  7.665280e-05  2.120541e-04 -5.531621e-04 
##       stat131       stat132       stat133       stat136       stat137 
##  9.456614e-05 -6.782008e-05  2.587256e-04 -3.513926e-04  4.026340e-05 
##       stat141       stat144       stat145       stat146       stat147 
##  4.343462e-04  4.261235e-04 -1.392547e-04 -6.317323e-04 -2.979232e-04 
##       stat148       stat149       stat153       stat154       stat155 
## -5.120468e-04 -6.870339e-04  1.699392e-04  7.978229e-05  9.460994e-06 
##       stat156       stat161       stat167       stat170       stat172 
##  2.350394e-04  8.057889e-06 -6.800282e-05 -5.086946e-04  5.886395e-04 
##       stat175       stat177       stat181       stat184       stat188 
## -2.252217e-04 -3.874828e-04  2.091867e-04  4.917933e-05  1.605646e-04 
##       stat198       stat199       stat203       stat204       stat207 
## -5.230995e-05  1.804489e-04  9.543121e-05 -1.957653e-04  3.132167e-04 
##       stat210       stat214       stat215       stat217      sqrt.x18 
## -1.456443e-04 -1.538144e-04 -3.449940e-05  3.615035e-05  1.176034e-02

Test

# Evaluate the caret-trained LARS model (filtered training set) on the test set.
# isTRUE() is safer than `== TRUE`: it is FALSE (not an error) when the flag
# is NA or not a length-1 logical.
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             # NOTE(review): `t` is presumably a transformation spec defined
             # earlier in the file — confirm upstream.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.037   2.083   2.095   2.095   2.107   2.145 
## [1] "lars  Test MSE: 0.00104732287718346"